From 9bb138a00657ae40ee436e6538ecf8131baa944d Mon Sep 17 00:00:00 2001 From: morfize <233522679+morfize@users.noreply.github.com> Date: Wed, 18 Mar 2026 21:41:35 -0700 Subject: [PATCH 1/8] feat: complete perp-bot with TUI dashboard, IPC layer, and full trading system Hyperliquid mean-reversion perpetual futures trading bot with: - Signal engine (Z-score, Bollinger, RSI, ADX) with prediction market regime modifiers - Paper and live execution with limit-first/taker-fallback strategy - Risk management (stop-loss, daily limits, cooldowns, position sizing) - Backtesting with realistic cost model and walk-forward analysis - Unix socket IPC for daemon-TUI communication - Textual TUI dashboard with live monitoring and key-bound commands - 167 tests passing, zero lint errors Co-Authored-By: Claude Opus 4.6 (1M context) --- .env.example | 10 + .gitignore | 21 + .python-version | 1 + CLAUDE.md | 81 ++ README.md | 0 config.yaml | 82 ++ deploy/deploy.sh | 39 + deploy/perp-bot.service | 29 + hyperliquid-mean-reversion-bot-design.md | 400 +++++++ main.py | 792 +++++++++++++ pyproject.toml | 48 + scratch.py | 0 src/perp_bot/__init__.py | 0 src/perp_bot/backtest/__init__.py | 23 + src/perp_bot/backtest/config.py | 5 + src/perp_bot/backtest/cost_model.py | 90 ++ src/perp_bot/backtest/engine.py | 221 ++++ src/perp_bot/backtest/executor.py | 113 ++ src/perp_bot/backtest/metrics.py | 137 +++ src/perp_bot/backtest/results.py | 115 ++ src/perp_bot/backtest/risk_adapter.py | 82 ++ src/perp_bot/backtest/sensitivity.py | 120 ++ src/perp_bot/backtest/walk_forward.py | 98 ++ src/perp_bot/config.py | 174 +++ src/perp_bot/data/__init__.py | 0 src/perp_bot/data/client.py | 124 +++ src/perp_bot/data/db.py | 253 +++++ src/perp_bot/data/ingest.py | 104 ++ src/perp_bot/data/prediction_client.py | 104 ++ src/perp_bot/data/ws_client.py | 220 ++++ src/perp_bot/execution/__init__.py | 0 src/perp_bot/execution/executor.py | 61 + src/perp_bot/execution/live_executor.py | 563 ++++++++++ 
src/perp_bot/infra/__init__.py | 0 src/perp_bot/infra/alerts.py | 49 + src/perp_bot/infra/health.py | 79 ++ src/perp_bot/infra/logging.py | 41 + src/perp_bot/ipc/__init__.py | 1 + src/perp_bot/ipc/client.py | 80 ++ src/perp_bot/ipc/protocol.py | 22 + src/perp_bot/ipc/server.py | 177 +++ src/perp_bot/ipc/state.py | 60 + src/perp_bot/reporting/__init__.py | 0 src/perp_bot/reporting/compare.py | 145 +++ src/perp_bot/reporting/weekly.py | 89 ++ src/perp_bot/risk/__init__.py | 0 src/perp_bot/risk/manager.py | 113 ++ src/perp_bot/signals/__init__.py | 0 src/perp_bot/signals/engine.py | 164 +++ src/perp_bot/signals/indicators.py | 136 +++ src/perp_bot/signals/prediction.py | 92 ++ src/perp_bot/tui/__init__.py | 1 + src/perp_bot/tui/app.py | 210 ++++ src/perp_bot/tui/app.tcss | 47 + src/perp_bot/tui/widgets/__init__.py | 1 + src/perp_bot/tui/widgets/header.py | 62 ++ src/perp_bot/tui/widgets/log.py | 60 + src/perp_bot/tui/widgets/position.py | 63 ++ src/perp_bot/tui/widgets/risk.py | 58 + src/perp_bot/tui/widgets/signals.py | 67 ++ src/perp_bot/tui/widgets/trades.py | 28 + tests/__init__.py | 0 tests/test_alerts.py | 49 + tests/test_backtest_cost_model.py | 112 ++ tests/test_backtest_engine.py | 280 +++++ tests/test_backtest_metrics.py | 148 +++ tests/test_indicators.py | 127 +++ tests/test_ipc.py | 196 ++++ tests/test_live_executor.py | 375 +++++++ tests/test_prediction.py | 416 +++++++ tests/test_risk.py | 117 ++ tests/test_signals.py | 82 ++ tests/test_tui.py | 63 ++ tests/test_ws_client.py | 182 +++ uv.lock | 1293 ++++++++++++++++++++++ 75 files changed, 9395 insertions(+) create mode 100644 .env.example create mode 100644 .gitignore create mode 100644 .python-version create mode 100644 CLAUDE.md create mode 100644 README.md create mode 100644 config.yaml create mode 100755 deploy/deploy.sh create mode 100644 deploy/perp-bot.service create mode 100644 hyperliquid-mean-reversion-bot-design.md create mode 100644 main.py create mode 100644 pyproject.toml create mode 
100644 scratch.py create mode 100644 src/perp_bot/__init__.py create mode 100644 src/perp_bot/backtest/__init__.py create mode 100644 src/perp_bot/backtest/config.py create mode 100644 src/perp_bot/backtest/cost_model.py create mode 100644 src/perp_bot/backtest/engine.py create mode 100644 src/perp_bot/backtest/executor.py create mode 100644 src/perp_bot/backtest/metrics.py create mode 100644 src/perp_bot/backtest/results.py create mode 100644 src/perp_bot/backtest/risk_adapter.py create mode 100644 src/perp_bot/backtest/sensitivity.py create mode 100644 src/perp_bot/backtest/walk_forward.py create mode 100644 src/perp_bot/config.py create mode 100644 src/perp_bot/data/__init__.py create mode 100644 src/perp_bot/data/client.py create mode 100644 src/perp_bot/data/db.py create mode 100644 src/perp_bot/data/ingest.py create mode 100644 src/perp_bot/data/prediction_client.py create mode 100644 src/perp_bot/data/ws_client.py create mode 100644 src/perp_bot/execution/__init__.py create mode 100644 src/perp_bot/execution/executor.py create mode 100644 src/perp_bot/execution/live_executor.py create mode 100644 src/perp_bot/infra/__init__.py create mode 100644 src/perp_bot/infra/alerts.py create mode 100644 src/perp_bot/infra/health.py create mode 100644 src/perp_bot/infra/logging.py create mode 100644 src/perp_bot/ipc/__init__.py create mode 100644 src/perp_bot/ipc/client.py create mode 100644 src/perp_bot/ipc/protocol.py create mode 100644 src/perp_bot/ipc/server.py create mode 100644 src/perp_bot/ipc/state.py create mode 100644 src/perp_bot/reporting/__init__.py create mode 100644 src/perp_bot/reporting/compare.py create mode 100644 src/perp_bot/reporting/weekly.py create mode 100644 src/perp_bot/risk/__init__.py create mode 100644 src/perp_bot/risk/manager.py create mode 100644 src/perp_bot/signals/__init__.py create mode 100644 src/perp_bot/signals/engine.py create mode 100644 src/perp_bot/signals/indicators.py create mode 100644 src/perp_bot/signals/prediction.py 
create mode 100644 src/perp_bot/tui/__init__.py create mode 100644 src/perp_bot/tui/app.py create mode 100644 src/perp_bot/tui/app.tcss create mode 100644 src/perp_bot/tui/widgets/__init__.py create mode 100644 src/perp_bot/tui/widgets/header.py create mode 100644 src/perp_bot/tui/widgets/log.py create mode 100644 src/perp_bot/tui/widgets/position.py create mode 100644 src/perp_bot/tui/widgets/risk.py create mode 100644 src/perp_bot/tui/widgets/signals.py create mode 100644 src/perp_bot/tui/widgets/trades.py create mode 100644 tests/__init__.py create mode 100644 tests/test_alerts.py create mode 100644 tests/test_backtest_cost_model.py create mode 100644 tests/test_backtest_engine.py create mode 100644 tests/test_backtest_metrics.py create mode 100644 tests/test_indicators.py create mode 100644 tests/test_ipc.py create mode 100644 tests/test_live_executor.py create mode 100644 tests/test_prediction.py create mode 100644 tests/test_risk.py create mode 100644 tests/test_signals.py create mode 100644 tests/test_tui.py create mode 100644 tests/test_ws_client.py create mode 100644 uv.lock diff --git a/.env.example b/.env.example new file mode 100644 index 0000000..277f325 --- /dev/null +++ b/.env.example @@ -0,0 +1,10 @@ +# Hyperliquid wallet private key (for signed exchange requests in Phase 4) +HL_PRIVATE_KEY= +# Hyperliquid wallet address +HL_WALLET_ADDRESS= +# Discord webhook URL for alerts (optional) +DISCORD_WEBHOOK_URL= +# Telegram Bot API token (optional) +TELEGRAM_BOT_TOKEN= +# Telegram chat ID for alerts (optional) +TELEGRAM_CHAT_ID= diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..b33e3e7 --- /dev/null +++ b/.gitignore @@ -0,0 +1,21 @@ +# Python-generated files +__pycache__/ +*.py[oc] +build/ +dist/ +wheels/ +*.egg-info + +# Virtual environments +.venv + +# Secrets +.env + +# Data +*.db +*.sqlite +data/*.csv + +# Tool caches +.ruff_cache/ diff --git a/.python-version b/.python-version new file mode 100644 index 0000000..6324d40 --- 
/dev/null +++ b/.python-version @@ -0,0 +1 @@ +3.14 diff --git a/CLAUDE.md b/CLAUDE.md new file mode 100644 index 0000000..77152eb --- /dev/null +++ b/CLAUDE.md @@ -0,0 +1,81 @@ +# CLAUDE.md + +This file provides guidance to Claude Code (claude.ai/code) when working with code in this repository. + +## Project Overview + +Hyperliquid perpetual futures mean-reversion trading bot. Designed for small capital (~$670, 3x leverage) day-trading on Hyperliquid's on-chain CLOB DEX. The full design specification is in `hyperliquid-mean-reversion-bot-design.md` (Japanese). + +## Tech Stack + +- **Language**: Python 3.12+ +- **Exchange SDK**: `hyperliquid-python-sdk` + `websockets` +- **Data store**: SQLite (initial) -> DuckDB (analysis) +- **Indicators**: Self-implemented with pandas/numpy (no ta-lib dependency — intentional for full logic transparency) +- **Deployment**: GCP Compute Engine (e2-small), systemd service +- **Alerts**: Discord Webhook or Telegram Bot API +- **Logging**: Python `logging` module, JSON-structured + +## Architecture (Planned) + +Nine modules: + +| Module | Responsibility | +|---|---| +| **Data Layer** | REST + WebSocket data ingestion (`WsClient` for real-time mid prices, candles, BBO), SQLite persistence, OHLCV candles (1m/5m/15m/1h), prediction market snapshots | +| **Signal Engine** | Z-Score, Bollinger Bands, RSI, ADX calculations. Entry signal = all 4 indicators agree + ADX regime filter. Prediction market regime modifiers | +| **Prediction Layer** | Polymarket (war risk) + Kalshi (Fed rate) polling, regime classification (NORMAL/HIGH_RISK/DOVISH/HAWKISH/CRISIS), funding side preference | +| **Execution Layer** | `PaperExecutor` (DB-only) + `LiveExecutor` (real orders via Exchange API).
Limit-first with taker-fallback, atomic server-side SL attachment, crash recovery via exchange position query | +| **Risk Manager** | Stop-loss (3% per trade), daily loss limit (8%), max 1 position, 30min cooldown after SL, 50% margin usage cap, regime-based position sizing | +| **Backtester** | Fee model (Maker 0.015%/Taker 0.045%), slippage sim, walk-forward analysis for overfitting prevention | +| **Infrastructure** | Structured logging (stdout + file), alert dispatch, health checks | +| **IPC Layer** | Unix socket daemon state server (`DaemonStateServer`), client (`DaemonClient`), thread-safe `DaemonState` container. Socket carries volatile state; SQLite WAL provides concurrent read access for persistent data | +| **TUI Dashboard** | Textual-based terminal UI (`PerpBotApp`). Attaches/detaches freely from daemon. Panels: header, position, signals, risk, trades, log. Key bindings: pause/resume/emergency close | + +## Key Trading Logic + +- **Entry**: Z-score > ±2.0 AND RSI overbought/oversold AND price outside Bollinger Band AND ADX < 25 (range-bound regime) +- **Exit (profit)**: Z-score returns to ±0.3 (mean reversion complete) +- **Exit (stop)**: Z-score exceeds ±3.0 against position, OR 3% capital loss, OR 24h timeout +- **Order strategy**: Limit orders (Maker) preferred; fallback to market (Taker) if unfilled. Server-side stop orders on Hyperliquid for redundancy. + +## Development Phases + +1. **Data Pipeline** — OHLCV + funding rate ingestion into SQLite, WebSocket real-time feed +2. **Signal Engine + Backtester** — Indicator modules, backtest engine with realistic cost model, walk-forward overfitting checks +3. **Risk Management + Paper Trading** — Risk modules, paper-trade mode, validate against backtest results +4. 
**Live Trading** — Signed exchange API, GCP deploy, staged rollout (start with 20% of capital) + +## Hyperliquid API Notes + +- Gas-free: no gas costs for order placement/modification/cancellation +- Funding rate: settled hourly (1/8 of 8h rate each hour) +- REST info endpoint: `POST /info` with varying `type` field (metaAndAssetCtxs, candleSnapshot, fundingHistory, clearinghouseState) +- Exchange endpoint: `POST /exchange` (signed) +- Known risk: API server overload incidents have caused 30min+ downtime — always pair bot-side SL with server-side stop orders + +## Commands + +```bash +uv sync # Install dependencies +uv run pytest tests/ -v # Run all tests +uv run pytest tests/test_indicators.py -v # Run a single test file +uv run ruff check src/ tests/ # Lint +uv run python main.py backfill # Backfill historical data from Hyperliquid +uv run python main.py trade # Start trading loop (paper mode by default, set mode: "live" in config.yaml for real orders) +uv run python main.py backfill-predictions # Fetch current prediction market snapshots +uv run python main.py backtest # Run backtest over historical data +uv run python main.py walkforward # Walk-forward overfitting analysis +uv run python main.py sensitivity # Parameter sensitivity sweep +uv run python main.py screen # Screen symbols by Hurst exponent for mean-reversion fit +uv run python main.py review --weeks 1 # Weekly performance report +uv run python main.py compare --days 7 # Compare paper trades vs backtest over same period +uv run python main.py tui # Launch TUI dashboard (attach to running daemon) +uv run python main.py status # One-shot daemon state query (JSON output) +``` + +## Conventions + +- Design doc and code comments may be in Japanese +- All indicator calculations are self-implemented (do not introduce ta-lib or similar libraries) +- Backtest must include: trading fees, slippage, funding costs, and execution delay simulation diff --git a/README.md b/README.md new file mode 100644 index 
0000000..e69de29 diff --git a/config.yaml b/config.yaml new file mode 100644 index 0000000..5111562 --- /dev/null +++ b/config.yaml @@ -0,0 +1,82 @@ +# ===== Trading Parameters ===== +trading: + symbols: ["ETH"] + leverage: 3 + capital_usd: 670.0 + margin_usage_limit: 0.5 # max 50% of margin + +# ===== Signal Parameters ===== +signals: + zscore_lookback: 20 + zscore_entry_threshold: 2.0 + zscore_exit_threshold: 0.3 + zscore_stop_threshold: 3.0 + bollinger_period: 20 + bollinger_std: 2.0 + rsi_period: 14 + rsi_overbought: 70 + rsi_oversold: 30 + adx_period: 14 + adx_threshold: 25 # below this = range-bound (trade allowed) + +# ===== Risk Management ===== +risk: + max_loss_per_trade_pct: 0.03 # 3% of capital + daily_loss_limit_pct: 0.08 # 8% of capital + max_positions: 1 + cooldown_seconds: 1800 # 30 min after stop-loss + position_timeout_hours: 24 + +# ===== Data ===== +data: + timeframes: ["1m", "5m", "15m", "1h"] + primary_timeframe: "15m" + history_days: 90 + db_path: "perp_bot.db" + +# ===== Execution ===== +execution: + order_type: "limit" # limit-first strategy + taker_fallback_seconds: 30 # switch to market after this + use_server_side_stop: true + +# ===== Prediction Markets ===== +prediction: + enabled: true + poll_interval_minutes: 15 + war_risk_threshold: 0.4 # above this → HIGH_RISK regime + war_risk_crisis_threshold: 0.7 # above this → CRISIS regime + rate_change_threshold: 0.3 # magnitude above this → DOVISH/HAWKISH + position_size_reduction: 0.5 # multiply size by this in HIGH_RISK + markets: + - slug: "iran_conflict" + source: "polymarket" + market_id: "" # replace with actual Polymarket condition ID + category: "war_risk" + weight: 0.6 + - slug: "ukraine_escalation" + source: "polymarket" + market_id: "" # replace with actual Polymarket condition ID + category: "war_risk" + weight: 0.4 + - slug: "fed_rate_next_fomc" + source: "kalshi" + market_id: "FED-..." 
# replace with actual Kalshi ticker + category: "rate_change" + weight: 1.0 + +# ===== Backtest ===== +backtest: + maker_fee_rate: 0.00015 # 0.015% maker + taker_fee_rate: 0.00045 # 0.045% taker + slippage_min_pct: 0.0001 # 0.01% min slippage + slippage_max_pct: 0.0005 # 0.05% max slippage + entry_delay_candles: 1 # 1-candle execution delay + cancel_if_signal_gone: true # cancel pending order if signal disappears + train_days: 60 # walk-forward train window + test_days: 15 # walk-forward test window + step_days: 15 # walk-forward step size + seed: 42 # reproducible slippage + +# ===== Mode ===== +mode: "paper" # paper | live diff --git a/deploy/deploy.sh b/deploy/deploy.sh new file mode 100755 index 0000000..393a9e7 --- /dev/null +++ b/deploy/deploy.sh @@ -0,0 +1,39 @@ +#!/usr/bin/env bash +set -euo pipefail + +# Deploy perp-bot to a GCP Compute Engine instance. +# Usage: ./deploy.sh [zone] +# +# Prerequisites: +# - gcloud CLI authenticated +# - SSH access to the instance +# - uv installed on the remote machine + +INSTANCE="${1:?Usage: deploy.sh [zone]}" +ZONE="${2:-us-central1-a}" +REMOTE_DIR="/opt/perp-bot" + +echo "==> Syncing code to ${INSTANCE}:${REMOTE_DIR}" +gcloud compute scp --recurse --zone="${ZONE}" \ + --exclude='.git,__pycache__,.venv,*.db,.env,node_modules' \ + . 
"${INSTANCE}:${REMOTE_DIR}" + +echo "==> Installing dependencies" +gcloud compute ssh --zone="${ZONE}" "${INSTANCE}" -- bash -c " + cd ${REMOTE_DIR} + uv sync --frozen +" + +echo "==> Installing systemd service" +gcloud compute ssh --zone="${ZONE}" "${INSTANCE}" -- sudo bash -c " + cp ${REMOTE_DIR}/deploy/perp-bot.service /etc/systemd/system/ + systemctl daemon-reload + systemctl enable perp-bot + systemctl restart perp-bot +" + +echo "==> Checking service status" +gcloud compute ssh --zone="${ZONE}" "${INSTANCE}" -- \ + sudo systemctl status perp-bot --no-pager + +echo "==> Deploy complete" diff --git a/deploy/perp-bot.service b/deploy/perp-bot.service new file mode 100644 index 0000000..0d4c6e7 --- /dev/null +++ b/deploy/perp-bot.service @@ -0,0 +1,29 @@ +[Unit] +Description=Hyperliquid Mean Reversion Trading Bot +After=network-online.target +Wants=network-online.target + +[Service] +Type=simple +User=perp-bot +Group=perp-bot +WorkingDirectory=/opt/perp-bot +EnvironmentFile=/opt/perp-bot/.env +ExecStart=/opt/perp-bot/.venv/bin/python main.py trade +Restart=always +RestartSec=10 +WatchdogSec=300 + +# Security hardening +NoNewPrivileges=true +ProtectSystem=strict +ProtectHome=true +ReadWritePaths=/opt/perp-bot + +# Logging +StandardOutput=journal +StandardError=journal +SyslogIdentifier=perp-bot + +[Install] +WantedBy=multi-user.target diff --git a/hyperliquid-mean-reversion-bot-design.md b/hyperliquid-mean-reversion-bot-design.md new file mode 100644 index 0000000..f8a5749 --- /dev/null +++ b/hyperliquid-mean-reversion-bot-design.md @@ -0,0 +1,400 @@ +# Hyperliquid ミーンリバージョン Bot 設計書 + +## 1. 
エグゼクティブサマリー + +Hyperliquid の Perpetual 市場において、統計的な平均回帰(Mean Reversion)を利用したデイトレードbotを設計・運用する。小資金(〜$670)で3倍レバレッジを用い、リスクを制御しつつリターンを追求する。 + +| 項目 | 内容 | +|---|---| +| 市場 | Hyperliquid Perpetual | +| 戦略 | ミーンリバージョン(平均回帰・逆張り) | +| 時間軸 | 分〜時間単位(デイトレード) | +| 資金規模 | 〜$670(約10万円) | +| レバレッジ | 3倍 → 有効ポジション 〜$2,010 | +| 実装言語 | Python | +| 実行環境 | GCP Compute Engine | +| 目的 | 本番運用(収益追求) | + +--- + +## 2. 戦略選定の経緯 + +### 2.1 検討した戦略と棄却理由 + +| 戦略 | 評価 | 棄却理由 | +|---|---|---| +| ファンディングレート裁定 | 低リスク・低リターン | $670では手数料負けする。ベースレート0.00125%/hでは往復手数料($0.37〜0.77)の回収に22〜88時間かかり、月$10〜15が上限。資金$5,000以上で初めて実用的 | +| グリッドbot | 中リスク・中リターン | シンプルだがレンジ崩壊時のドローダウンが大きい。相場観不要だが「知的に面白い」軸でミーンリバージョンを選択 | +| マーケットメイキング | 高難度 | 在庫リスク管理が複雑。実装難易度が最も高く、小資金での運用は非現実的 | +| Meme coin | 高リスク・非対称リターン | 小資金との相性は良いが、rug pullリスクが高く「着実な運用」とは相容れない | +| Prediction market | — | デイトレードbotとの相性が低い。取引機会が限定的 | + +### 2.2 ミーンリバージョンを選択した理由 + +- **統計的検証可能性**: バックテストでエッジの有無を事前に数字で確認できる +- **知的深度**: 価格の自己相関構造、分布特性、レジーム検出など、複雑系・統計物理との接点がある +- **リターンポテンシャル**: 同じ資金・レバレッジでファンディング裁定の5〜10倍のリターンが狙える +- **リスク制御可能性**: ストップロスとレジームフィルターで最大損失を事前に定義できる + +--- + +## 3. 
Hyperliquid プラットフォーム概要 + +### 3.1 基本仕様 + +- オンチェーン CLOB(Central Limit Order Book)型の Perp DEX +- ブロック確定 <1秒(HyperBFT コンセンサス) +- **ガス代ゼロ**:注文の発注・変更・キャンセルにガス代は発生しない +- 100+ のPerp銘柄、47+ のSpot銘柄 +- 最大レバレッジ50倍(銘柄による) + +### 3.2 手数料体系 + +ベースティア(Tier 0、14日間加重出来高による段階制): + +| 種別 | Taker | Maker | +|---|---|---| +| Perp | 0.045% | 0.015% | +| Spot | 0.070% | 0.040% | + +- ガス代は一切なし +- Maker rebate は取引ごとにウォレットに直接支払い +- 14日間のperp出来高 + 2× spot出来高 でティア判定 +- HYPE ステーキングで5〜40%の追加割引あり + +**本botへの影響**: Perp市場のみ使用。Maker注文(limit order)を基本とし、往復コストを $2,010 × 0.015% × 2 = **$0.60** に抑える設計。Taker使用時は $2,010 × 0.045% × 2 = **$1.81**。 + +### 3.3 ファンディングレート + +- 1時間ごとに精算(8時間レートの1/8を毎時支払い) +- ベース金利: 0.01%/8h(固定、ショート側に支払い) +- プレミアム: 契約価格とオラクル価格の乖離に基づき変動 +- 上限: 4%/h + +**本botへの影響**: ポジション保有中のファンディング支払い/受取はP&L計算に含める必要がある。ショートポジション保有時はファンディング受取となる場合が多く、微小だがプラス要因。 + +### 3.4 プラットフォームリスク + +- 2025年7月: APIサーバー過負荷により30分超のダウンタイム発生。ポジションクローズ不能 +- 対策: ストップロスはサーバーサイド(Hyperliquidのstop order機能)にも設置し、bot側のストップロスと二重化する + +--- + +## 4. 戦略詳細設計 + +### 4.1 コアロジック + +価格が統計的な平均から有意に乖離した際に逆張りエントリーし、平均への回帰で利益を取る。 + +``` +if Z-score > +2.0 AND RSI > 70 AND price > BB_upper AND ADX < 25: + → SHORT entry + +if Z-score < -2.0 AND RSI < 30 AND price < BB_lower AND ADX < 25: + → LONG entry + +if abs(Z-score) < 0.3: + → CLOSE(利確:平均回帰完了) + +if Z-score exceeds ±3.0 against position: + → CLOSE(ストップロス) +``` + +### 4.2 シグナル指標 + +| 指標 | 役割 | パラメータ(初期値) | +|---|---|---| +| Z-Score | 平均からの乖離度(メインシグナル) | lookback: 20期間 | +| Bollinger Bands | 価格のバンド逸脱確認 | 期間: 20、σ: 2.0 | +| RSI | 買われすぎ/売られすぎ確認 | 期間: 14 | +| ADX | トレンド有無の判定(レジームフィルター) | 期間: 14、閾値: 25 | + +### 4.3 エントリー条件(3つ揃い + レジームフィルター) + +**ショートエントリー**: +1. Z-score > +2.0(平均から標準偏差2つ分以上、上に乖離) +2. RSI > 70(買われすぎ確認) +3. 価格 > ボリンジャーバンド上限(バンド逸脱確認) +4. 
ADX < 25(トレンドなし = レンジ相場であることの確認) + +**ロングエントリー**: 上記の逆条件。 + +**レジームフィルターの重要性**: ミーンリバージョンはレンジ相場でのみ機能する。トレンド相場で逆張りすると連敗する。ADX < 25 のフィルターにより、トレンド発生中のトレードを抑制し、不利な相場環境での損失を大幅に削減する。これは戦略の生死を分けるモジュールである。 + +### 4.4 エグジット条件 + +| 条件 | アクション | 目的 | +|---|---|---| +| Z-score が0付近(±0.3)に回帰 | 利確クローズ | 平均回帰完了 | +| 逆方向にZ-score ±3.0到達 | ストップロス | 損失制限 | +| 資金の3%(≒$20)の含み損 | 強制クローズ | 1トレードあたり最大損失の制限 | +| ポジション保有から24時間経過 | タイムアウトクローズ | デイトレードの時間軸を維持 | + +### 4.5 対象銘柄の選定基準 + +以下の基準を満たす銘柄を候補とする(バックテストで最終選定): + +- **流動性**: 24h出来高 $50M以上(スリッページ抑制) +- **ボラティリティ**: 適度なレンジ幅(狭すぎると利益が出ない、広すぎるとストップにかかりやすい) +- **平均回帰性**: ハースト指数 < 0.5(ランダムウォークより回帰傾向が強い) +- **スプレッド**: Bid-Ask スプレッドが十分に狭い + +候補: ETH, BTC, SOL, HYPE 等の主要銘柄(バックテストで確認) + +--- + +## 5. リスク管理設計 + +### 5.1 リスクパラメータ + +| パラメータ | 値 | 根拠 | +|---|---|---| +| 1トレード最大損失 | 資金の3%(≒$20) | 小資金での「1発退場」防止。$2,010ポジションで約1%の逆行に相当 | +| 日次損失リミット | 資金の8%(≒$54) | 連敗時の損失拡大防止。3連敗でリミットに到達し、当日のトレード停止 | +| 同時ポジション | 最大1つ | 証拠金の余裕がない小資金では複数ポジション管理は非現実的 | +| クールダウン | ストップロス後 30分 | 同方向への「ループ負け」防止 | +| ポジション使用率 | 証拠金の50%まで | フラッシュクラッシュ時の清算回避バッファ | + +### 5.2 清算リスク + +- 3倍レバレッジの理論的清算ライン: 約33%の逆行 +- Hyperliquidの維持証拠金要件により実際の清算ラインはやや手前 +- ストップロスが1%逆行で発動するため、通常は清算の遥か手前でポジションがクローズされる +- **フラッシュクラッシュへの備え**: Hyperliquidのサーバーサイド stop order を併用し、bot側のストップロスとの二重化で対応 + +### 5.3 リスクシナリオ + +| シナリオ | 確率 | 影響 | 対策 | +|---|---|---|---| +| ストップロスによる通常の損失 | 高 | -$20/回 | 想定内。勝率55%なら期待値プラス | +| 日次リミット到達 | 中 | -$54/日 | トレード停止で損失を限定 | +| フラッシュクラッシュでSLスリップ | 低 | -$100〜170 | サーバーサイドSL併用 + ポジション使用率50% | +| Hyperliquidダウンタイム | 低 | 不確定 | サーバーサイドSLが機能すれば影響限定 | +| 戦略のエッジ消失 | 中 | 月次赤字が継続 | 週次パフォーマンスレビューで早期検知。3週連続赤字で運用停止・再検証 | + +--- + +## 6. 
想定収益モデル + +### 6.1 シナリオ別月次収益 + +前提: 資金 $670、レバレッジ 3倍、ポジションサイズ $2,010 + +| シナリオ | 月間トレード数 | 勝率 | 平均利益 | 平均損失 | 月次リターン | +|---|---|---|---|---|---| +| 楽観(レンジ相場継続) | 20〜30 | 55% | +1.2% | -0.8% | +10〜20%($65〜130) | +| 中立 | 15〜20 | 50% | +1.0% | -0.8% | +3〜8%($20〜50) | +| 悲観(トレンド相場) | 8〜12 | 45% | +0.8% | -1.0% | -10〜20%(-$65〜130) | + +### 6.2 手数料の影響 + +| 注文タイプ | 往復コスト | 月20トレードのコスト | +|---|---|---| +| 全てMaker | $0.60 | $12.00 | +| 全てTaker | $1.81 | $36.20 | +| Maker/Taker 半々 | $1.21 | $24.10 | + +**設計方針**: エントリーはlimit order(Maker)を基本とし、約定しない場合のみ一定時間後にmarket order(Taker)へフォールバック。エグジットの利確はlimit、ストップロスはmarket order。 + +### 6.3 正直な期待値 + +- 月$20〜50の利益が中央的なシナリオ +- 年間では-30%〜+100%の幅がある +- 最初の3ヶ月はバックテストとペーパートレードでエッジの検証に費やすべき +- **bot開発の技術経験そのものが、金銭的リターンと同等以上の価値を持つ** + +--- + +## 7. アーキテクチャ + +### 7.1 全体構成 + +``` +┌─────────────────────────────────────────────────┐ +│ GCP Compute Engine │ +├─────────────────────────────────────────────────┤ +│ │ +│ ┌──────────────┐ ┌──────────────────────┐ │ +│ │ Data Layer │ │ Signal Engine │ │ +│ │ │ │ │ │ +│ │ REST API ────┼───▶│ Z-Score Calculator │ │ +│ │ WebSocket ───┤ │ Bollinger Bands │ │ +│ │ SQLite DB ◀──┤ │ RSI Calculator │ │ +│ └──────────────┘ │ ADX Regime Filter │ │ +│ └──────────┬───────────┘ │ +│ │ │ +│ ▼ │ +│ ┌──────────────┐ ┌──────────────────────┐ │ +│ │ Risk Manager │◀──│ Execution Layer │ │ +│ │ │ │ │ │ +│ │ Stop Loss │───▶│ Order Manager │ │ +│ │ Daily Limit │ │ Position Tracker │ │ +│ │ Max Position │ │ Slippage Monitor │ │ +│ │ Cooldown │ └──────────────────────┘ │ +│ └──────────────┘ │ +│ │ +│ ┌──────────────┐ ┌──────────────────────┐ │ +│ │ Backtester │ │ Infrastructure │ │ +│ │ │ │ │ │ +│ │ Fee Model │ │ Structured Logger │ │ +│ │ Slippage Sim │ │ Discord/TG Alert │ │ +│ │ Perf Stats │ │ Health Check │ │ +│ └──────────────┘ └──────────────────────┘ │ +│ │ +└─────────────────────────────────────────────────┘ +``` + +### 7.2 技術スタック + +| コンポーネント | 技術選定 | 理由 | +|---|---|---| +| 言語 | Python 3.12+ | Hyperliquid 
SDK対応、pandas/numpy充実、情報量最多 | +| データ取得 | hyperliquid-python-sdk + websockets | 公式SDK + 低レイテンシリアルタイムデータ | +| データ保存 | SQLite(初期)→ DuckDB(分析時) | 軽量・セットアップ不要・分析クエリ高速 | +| 指標計算 | pandas + numpy(自前実装) | ta-libへの依存を避け、ロジックの完全な把握を優先 | +| バックテスト | 自前実装 | 手数料・スリッページ・ファンディングを正確にモデル化 | +| 実行環境 | GCP Compute Engine (e2-small) | 既存のGCP経験活用。e2-small で十分 | +| アラート | Discord Webhook or Telegram Bot API | ポジション開閉・異常検知の通知 | +| ログ | Python logging + JSON形式 | 構造化ログで後からの分析を容易に | + +### 7.3 Hyperliquid API エンドポイント + +| 用途 | エンドポイント | メソッド | +|---|---|---| +| 価格・OI等のメタ情報 | `POST /info` type: "metaAndAssetCtxs" | REST | +| OHLCV ヒストリカルデータ | `POST /info` type: "candleSnapshot" | REST | +| ファンディングレート履歴 | `POST /info` type: "fundingHistory" | REST | +| リアルタイム価格 | WebSocket subscription | WS | +| 注文発注 | `POST /exchange` | REST (署名付き) | +| ポジション確認 | `POST /info` type: "clearinghouseState" | REST | + +--- + +## 8. 開発ロードマップ + +### Phase 1: データパイプライン(1〜2週間) + +**目標**: Hyperliquid から過去データを取得し、ローカルDBに保存。リアルタイムデータ受信も確認。 + +- [ ] Hyperliquid Python SDK のセットアップ +- [ ] REST API で OHLCV キャンドルデータ取得(1m, 5m, 15m, 1h) +- [ ] SQLite スキーマ設計・データ保存パイプライン +- [ ] WebSocket でリアルタイム価格受信の動作確認 +- [ ] ファンディングレート履歴の取得・保存 + +**成果物**: 過去3ヶ月以上のOHLCVデータがSQLiteに格納された状態 + +### Phase 2: シグナルエンジン + バックテスター(1〜2週間) + +**目標**: 指標計算モジュールとバックテストエンジンを構築し、戦略のエッジを統計的に検証する。 + +- [ ] Z-Score 計算モジュール +- [ ] Bollinger Bands 計算モジュール +- [ ] RSI 計算モジュール +- [ ] ADX(レジームフィルター)計算モジュール +- [ ] バックテストエンジン(手数料・スリッページ・ファンディング込み) +- [ ] パフォーマンス指標の算出(勝率、期待値、シャープレシオ、最大ドローダウン、Calmarレシオ) +- [ ] パラメータ感度分析(オーバーフィッティング検出のためのウォークフォワード分析) + +**成果物**: バックテスト結果レポート。エッジの有無を数字で判断。 + +### Phase 3: リスク管理 + ペーパートレード(1週間) + +**目標**: リスク管理モジュールを実装し、リアルタイムデータで「発注しない」ペーパートレードを1〜2週間回す。 + +- [ ] ストップロスモジュール(1トレード最大損失3%) +- [ ] 日次損失リミットモジュール(8%で当日停止) +- [ ] ポジションサイズ計算(証拠金使用率50%上限) +- [ ] クールダウンタイマー(SL後30分エントリー禁止) +- [ ] ペーパートレードモード実装(シグナル発生 → 記録のみ) +- [ ] ペーパートレード結果とバックテスト結果の比較・検証 + +**成果物**: ペーパートレード結果がバックテストと概ね一致することの確認 + +### Phase 4: 本番接続(1週間) + +**目標**: 
実際にHyperliquidに発注し、小額から段階的にスケールアップ。 + +- [ ] Hyperliquid Exchange API への署名付きリクエスト実装 +- [ ] Limit order → Taker fallback ロジック +- [ ] サーバーサイド Stop Order の設置 +- [ ] GCP VM へのデプロイ(systemd サービス化) +- [ ] Discord/Telegram アラート設置 +- [ ] 資金20%(≒$130)での試験運用開始 +- [ ] 2週間の安定稼働確認後、段階的にスケールアップ + +**成果物**: 本番稼働するbot + +### 継続運用フェーズ + +- [ ] 週次パフォーマンスレビュー +- [ ] 3週連続赤字で運用停止・戦略再検証 +- [ ] 月次でパラメータのリオプティマイゼーション検討 +- [ ] 新規銘柄候補のスクリーニング + +--- + +## 9. バックテスト設計指針 + +### 9.1 オーバーフィッティング対策 + +バックテストで最も危険なのは「過去データに完璧にフィットするが、将来のデータでは機能しないパラメータ」を見つけてしまうことである。以下の手法で対策する: + +- **ウォークフォワード分析**: データを「訓練期間」と「検証期間」に分割し、訓練期間で最適化したパラメータを検証期間でテスト。これをスライドしながら繰り返す +- **パラメータの安定性確認**: 最適パラメータの近傍(例: Z-score閾値を1.8〜2.2で変動させる)でも収益がプラスであることを確認。「ピンポイントでしか勝てない」パラメータは過学習の兆候 +- **シンプルさの優先**: パラメータ数を最小限に保つ。調整可能なパラメータが多いほど過学習リスクが上がる +- **複数銘柄での検証**: 1つの銘柄でしか機能しない戦略は、その銘柄固有のノイズを拾っている可能性が高い + +### 9.2 現実的なコストモデル + +バックテストに含めるべきコスト: + +- **取引手数料**: Maker 0.015% / Taker 0.045%(注文タイプに応じて) +- **スリッページ**: 銘柄の流動性に応じて0.01〜0.05%を加算 +- **ファンディング**: ポジション保有時間に応じて実際のレートを適用 +- **約定遅延**: シグナル発生から実際の約定まで1〜3キャンドル分の遅延を想定 + +--- + +## 10. 
注意事項・免責 + +### 10.1 規制面 + +- Hyperliquid は分散型取引所であり、日本の金融庁に登録された暗号資産交換業者ではない +- 日本の居住者が海外の暗号資産デリバティブ取引を行うことの法的リスクを各自で確認すること +- 利益が発生した場合の確定申告義務を把握しておくこと(暗号資産の売買益は雑所得) + +### 10.2 リスク認識 + +- 本設計書に記載された収益予測はすべてシミュレーションに基づく仮定であり、将来の収益を保証するものではない +- 投入した資金の全額を失う可能性がある +- 過去のバックテスト結果は将来のパフォーマンスを保証しない +- レバレッジ取引は損失を増幅させる +- スマートコントラクトリスク、プラットフォームリスク、規制リスクが常に存在する + +--- + +## Appendix A: 用語集 + +| 用語 | 説明 | +|---|---| +| Z-Score | (現在価格 - 移動平均) / 標準偏差。平均からの乖離度を標準偏差で正規化した値 | +| Bollinger Bands | 移動平均 ± n×標準偏差のバンド。価格がバンド外に出ると「行き過ぎ」のシグナル | +| RSI | Relative Strength Index。0〜100の範囲で、70超が買われすぎ、30未満が売られすぎ | +| ADX | Average Directional Index。トレンドの強さを測る指標。25超でトレンド発生と判断 | +| Maker | オーダーブックに流動性を追加する注文(limit order)。手数料が安い | +| Taker | オーダーブックから流動性を除去する注文(market order)。手数料が高い | +| ファンディングレート | Perp価格とスポット価格の乖離を是正するための定期的な支払い/受取 | +| レジームフィルター | 相場がレンジかトレンドかを判定し、戦略に適さない環境でのトレードを抑制する仕組み | +| ウォークフォワード分析 | バックテストでのオーバーフィッティングを防ぐために、訓練期間と検証期間をスライドさせてテストする手法 | +| デルタニュートラル | 反対ポジションで価格変動リスクを相殺する手法 | +| ハースト指数 | 時系列の長期記憶特性を測る指標。0.5未満は平均回帰傾向、0.5超はトレンド持続傾向 | + +## Appendix B: 参考リソース + +- [Hyperliquid Docs](https://hyperliquid.gitbook.io/hyperliquid-docs) +- [Hyperliquid Funding](https://hyperliquid.gitbook.io/hyperliquid-docs/trading/funding) +- [Hyperliquid Fees](https://hyperliquid.gitbook.io/hyperliquid-docs/trading/fees) +- [Hyperliquid API - Perpetuals](https://hyperliquid.gitbook.io/hyperliquid-docs/for-developers/api/info-endpoint/perpetuals) +- [Hyperliquid Python SDK](https://github.com/hyperliquid-dex/hyperliquid-python-sdk) +- [Hyperliquid Funding Rate Comparison](https://app.hyperliquid.xyz/fundingComparison) diff --git a/main.py b/main.py new file mode 100644 index 0000000..1ae05fd --- /dev/null +++ b/main.py @@ -0,0 +1,792 @@ +"""Main entry point — data backfill and trading loop.""" + +from __future__ import annotations + +import argparse +import json +import logging +import sys +import time + +import pandas as pd + +from perp_bot.backtest.config import BacktestConfig +from 
perp_bot.backtest.engine import BacktestEngine +from perp_bot.backtest.sensitivity import ParameterSensitivityAnalyzer +from perp_bot.backtest.walk_forward import WalkForwardRunner +from perp_bot.config import load_config +from perp_bot.data.client import HyperliquidClient, INTERVAL_MS +from perp_bot.data.db import Database +from perp_bot.data.ingest import DataIngestor +from perp_bot.data.prediction_client import KalshiClient, PolymarketClient +from perp_bot.data.ws_client import WsClient +from perp_bot.execution.executor import Executor, PaperExecutor +from perp_bot.execution.live_executor import LiveExecutor +from perp_bot.infra.alerts import send_discord_alert, send_telegram_alert +from perp_bot.infra.health import HealthChecker +from perp_bot.infra.logging import setup_logging +from perp_bot.ipc.protocol import get_socket_path +from perp_bot.ipc.server import DaemonStateServer +from perp_bot.ipc.state import DaemonState +from perp_bot.risk.manager import RiskManager +from perp_bot.signals.engine import Signal, SignalEngine +from perp_bot.signals.prediction import ( + PredictionRegime, + compute_regime, + funding_side_preference, + rate_change_score, + war_risk_score, +) + +logger = logging.getLogger(__name__) + + +def run_backfill(config_path: str | None = None) -> None: + """One-shot: backfill historical data into SQLite.""" + config = load_config(config_path) + db = Database(config.data.db_path) + client = HyperliquidClient() + ingestor = DataIngestor(config, db, client) + + try: + ingestor.run_full_backfill() + logger.info("Backfill complete") + finally: + db.close() + + +def run_trading_loop(config_path: str | None = None) -> None: + """Main trading loop — runs until interrupted. + + Supports 'paper' and 'live' modes. In live mode, uses LiveExecutor + for real order placement and WebSocket for real-time price feeds. + Exposes runtime state via Unix socket for TUI attachment. 
+ """ + config = load_config(config_path) + db = Database(config.data.db_path) + client = HyperliquidClient() + ingestor = DataIngestor(config, db, client) + signal_engine = SignalEngine(config.signals) + risk_manager = RiskManager(config, db) + + # Auto-stop after 3 consecutive losing weeks (§5.3) + if not _check_losing_weeks(db) and not any(a == "--force" for a in sys.argv): + logger.error("Use --force to override the losing weeks halt") + _alert(config, "BOT HALTED: 3 consecutive losing weeks") + sys.exit(1) + + # Mode-specific setup + executor: Executor + ws_client: WsClient | None = None + + if config.mode == "live": + if not config.hl_private_key: + logger.error("HL_PRIVATE_KEY required for live mode") + sys.exit(1) + executor = LiveExecutor(config, db) + # Set leverage — refuse to start if it fails + for sym in config.trading.symbols: + if not executor.set_leverage(sym, config.trading.leverage): + logger.error("Failed to set leverage for %s — aborting live mode", sym) + sys.exit(1) + # Reconcile exchange positions against DB + _reconcile_positions(executor, db, config) + logger.info("Live mode — real orders will be placed") + else: + executor = PaperExecutor(db) + logger.info("Paper mode — no real orders") + + # WebSocket for real-time prices (both modes benefit) + ws_client = WsClient() + ws_client.subscribe_mid_prices(config.trading.symbols) + + # Health checker for periodic heartbeat + health_checker = HealthChecker(config, db, ws_client, executor) + + # Prediction market clients + prediction_clients = _init_prediction_clients(config) + + # IPC: daemon state + socket server for TUI attachment + daemon_state = DaemonState(mode=config.mode) + socket_path = get_socket_path(config.data.db_path) + state_server = DaemonStateServer(socket_path, daemon_state, executor, db) + state_server.start() + + # Set up file logging for TUI log tailing + log_file = str(socket_path.parent / "perp-bot.log") + setup_logging(log_file=log_file) + + tf = 
config.data.primary_timeframe + interval_ms = INTERVAL_MS[tf] + min_candles = max( + config.signals.zscore_lookback, + config.signals.bollinger_period, + config.signals.rsi_period, + config.signals.adx_period * 2, # ADX needs more warmup + ) + + logger.info("Starting trading loop — mode=%s, timeframe=%s", config.mode, tf) + if config.discord_webhook_url: + send_discord_alert( + config.discord_webhook_url, + f"Bot started — mode={config.mode}", + ) + + last_prediction_poll_ms = 0 + prediction_regime_label = "NORMAL" + + try: + while True: + # WebSocket health check — reconnect if stale + if ws_client and not ws_client.is_healthy(): + logger.warning("WebSocket stale — triggering reconnect") + _alert(config, "WS RECONNECT: price feed was stale") + ws_client.reconnect() + + # Check pause state — skip tick but still monitor health + if daemon_state.paused: + daemon_state.update( + ws_healthy=ws_client.is_healthy() if ws_client else False, + ) + health_checker.tick(prediction_regime_label) + _sleep_until_next_candle(interval_ms) + continue + + last_prediction_poll_ms, prediction_regime_label = _tick( + config, db, client, ingestor, signal_engine, + risk_manager, executor, tf, min_candles, + prediction_clients, last_prediction_poll_ms, + ws_client, daemon_state, + ) + + # Update daemon state after tick + _update_daemon_state( + daemon_state, config, ws_client, risk_manager, + executor, prediction_regime_label, db, + ) + + health_checker.tick(prediction_regime_label) + _sleep_until_next_candle(interval_ms) + except KeyboardInterrupt: + logger.info("Shutting down") + finally: + state_server.stop() + if ws_client: + ws_client.close() + db.close() + + +def _reconcile_positions( + executor: LiveExecutor, db: Database, config, +) -> None: + """Reconcile exchange positions against DB on startup. 

    - Exchange has position but DB doesn't → create DB record from exchange state
    - DB has open position but exchange doesn't → close DB record as reconciled
    """
    for symbol in config.trading.symbols:
        exchange_pos = executor.get_exchange_position(symbol)
        db_trades = db.get_open_trades(symbol)

        if exchange_pos and not db_trades:
            # Exchange has a position we don't know about
            now = int(time.time() * 1000)
            trade_id = db.insert_trade({
                "symbol": symbol,
                "side": exchange_pos["side"],
                "entry_time": now,
                "entry_price": exchange_pos["entry_price"],
                "size_usd": exchange_pos["entry_price"] * exchange_pos["size_base"],
                "is_paper": 0,
            })
            logger.warning(
                "RECONCILED: Found exchange position %s %s (%.4f @ %.2f) "
                "not in DB — created trade #%d",
                exchange_pos["side"], symbol,
                exchange_pos["size_base"], exchange_pos["entry_price"],
                trade_id,
            )
            _alert(
                config,
                f"RECONCILE: Adopted orphan {exchange_pos['side']} "
                f"{symbol} @ {exchange_pos['entry_price']:.2f}",
            )

        elif db_trades and not exchange_pos:
            # DB thinks we have a position but exchange doesn't.
            # Closing at entry_price with pnl=0.0 records a flat trade;
            # any real P&L was realised outside our view.
            for trade in db_trades:
                now = int(time.time() * 1000)
                db.close_trade(
                    trade["id"], now, trade["entry_price"], 0.0,
                    "reconciled_missing",
                )
                logger.warning(
                    "RECONCILED: DB trade #%d (%s %s) has no exchange position — "
                    "closed as reconciled_missing",
                    trade["id"], trade["side"], symbol,
                )
            _alert(
                config,
                f"RECONCILE: Closed {len(db_trades)} orphan DB trade(s) for {symbol}",
            )

        elif exchange_pos and db_trades:
            # Both sides agree — only the first DB trade is reported;
            # presumably at most one open trade per symbol exists.
            logger.info(
                "Position sync OK: %s %s matches DB trade #%d",
                exchange_pos["side"], symbol, db_trades[0]["id"],
            )


def _check_losing_weeks(db: Database, num_weeks: int = 3) -> bool:
    """Check if the last N weeks were all net-negative.
Returns True if safe to start.""" + now_ms = int(time.time() * 1000) + week_ms = 7 * 24 * 3600 * 1000 + + losing_count = 0 + for i in range(num_weeks): + end = now_ms - i * week_ms + start = end - week_ms + trades = db.get_closed_trades_in_range(start, end) + if not trades: + return True # Not enough history — safe to start + weekly_pnl = sum(t.get("pnl", 0) or 0 for t in trades) + if weekly_pnl < 0: + losing_count += 1 + + if losing_count >= num_weeks: + logger.error( + "HALTED: %d consecutive losing weeks detected", num_weeks, + ) + return False + return True + + +def _init_prediction_clients(config) -> dict: + """Create prediction market client instances based on configured sources.""" + clients: dict = {} + if not config.prediction or not config.prediction.enabled: + return clients + sources = {m.source for m in config.prediction.markets} + if "polymarket" in sources: + clients["polymarket"] = PolymarketClient() + if "kalshi" in sources: + clients["kalshi"] = KalshiClient() + return clients + + +def _tick( + config, db, client, ingestor, signal_engine, + risk_manager, executor, tf, min_candles, + prediction_clients=None, last_prediction_poll_ms=0, + ws_client=None, daemon_state=None, +) -> tuple[int, str]: + """Single iteration of the trading loop. 
    Returns (last_prediction_poll_ms, regime_label)."""
    prediction_clients = prediction_clients or {}

    # --- Prediction market polling ---
    prediction_regime = PredictionRegime.NORMAL
    preferred_side = None
    now_ms = int(time.time() * 1000)

    if config.prediction and config.prediction.enabled and prediction_clients:
        poll_interval_ms = config.prediction.poll_interval_minutes * 60_000
        if now_ms - last_prediction_poll_ms >= poll_interval_ms:
            last_prediction_poll_ms = now_ms
            ingestor.update_predictions(prediction_clients, config.prediction)

        # Compute regime from latest cached data
        prediction_regime, preferred_side = _compute_prediction_state(db, config)

    for symbol in config.trading.symbols:
        # Update candles
        ingestor.update_candles(symbol)

        # Load candles into DataFrame
        candles = db.get_candles(symbol, tf, limit=min_candles + 50)
        if len(candles) < min_candles:
            logger.warning(
                "Not enough candles for %s %s (%d < %d)",
                symbol, tf, len(candles), min_candles,
            )
            continue

        df = pd.DataFrame(candles)
        df = signal_engine.compute_indicators(df)

        # Current position state — only the first open trade is considered.
        open_trades = db.get_open_trades(symbol)
        position_side = open_trades[0]["side"] if open_trades else None

        # Check position-level exits first
        if open_trades:
            trade = open_trades[0]
            current_price = _get_price(ws_client, client, symbol)

            # Capital-based stop loss
            if risk_manager.check_stop_loss(
                trade["entry_price"], current_price, trade["side"]
            ):
                pnl = _compute_pnl(trade, current_price)
                executor.close_position(
                    trade["id"], symbol, current_price, pnl,
                    "capital_stop_loss",
                )
                # Starts the post-stop cooldown tracked by the risk manager.
                risk_manager.record_stop_loss()
                _alert(config, f"STOP LOSS {symbol} pnl={pnl:.2f}")
                continue

            # Position timeout
            if risk_manager.check_position_timeout(trade["entry_time"]):
                pnl = _compute_pnl(trade, current_price)
                executor.close_position(
                    trade["id"], symbol, current_price, pnl,
                    "timeout_24h",
                )
                _alert(config, f"TIMEOUT {symbol} pnl={pnl:.2f}")
                continue

        # Signal evaluation with prediction regime
        result = signal_engine.evaluate(
            df, position_side, prediction_regime, preferred_side,
        )

        # Expose signal to daemon state for TUI
        if daemon_state is not None:
            daemon_state.latest_signals[symbol] = {
                "signal": result.signal.value,
                "reason": result.reason,
                "zscore": result.zscore_value,
                "rsi": result.rsi_value,
                "adx": result.adx_value,
                "price": result.price,
            }

        if result.signal == Signal.CLOSE and open_trades:
            trade = open_trades[0]
            current_price = _get_price(ws_client, client, symbol)
            pnl = _compute_pnl(trade, current_price)
            executor.close_position(
                trade["id"], symbol, current_price, pnl, result.reason,
            )
            _alert(config, f"CLOSE {symbol} {result.reason} pnl={pnl:.2f}")

        elif result.signal in (Signal.LONG, Signal.SHORT) and not open_trades:
            risk_check = risk_manager.check_entry()
            if risk_check.allowed:
                size = risk_manager.compute_position_size(prediction_regime)
                if size <= 0:
                    logger.info(
                        "Position size zero — regime=%s",
                        prediction_regime.value,
                    )
                    continue
                price = _get_price(ws_client, client, symbol)
                executor.open_position(symbol, result.signal.value, size, price)
                regime_tag = ""
                if prediction_regime != PredictionRegime.NORMAL:
                    regime_tag = f" [{prediction_regime.value}]"
                _alert(
                    config,
                    f"OPEN {result.signal.value} {symbol}"
                    f" @ {price:.2f} size=${size:.0f}"
                    f"{regime_tag}",
                )
                # Alert if server-side SL placement failed
                # NOTE(review): reads LiveExecutor's private _sl_failed flag —
                # consider exposing a public accessor.
                if (
                    isinstance(executor, LiveExecutor)
                    and executor._sl_failed
                ):
                    _alert(
                        config,
                        f"CRITICAL: {symbol} UNHEDGED — "
                        f"server-side SL failed!",
                    )
            else:
                logger.info("Entry blocked: %s", risk_check.reason)

    return last_prediction_poll_ms, prediction_regime.value


def _get_price(ws_client, rest_client, symbol: str) -> float:
    """Get mid price from WebSocket cache, falling back to REST."""
    if ws_client is not None:
        ws_price = ws_client.get_mid_price(symbol)
if ws_price is not None: + return ws_price + return rest_client.get_mid_price(symbol) + + +def _compute_prediction_state(db, config): + """Compute prediction regime and preferred side from latest DB snapshots.""" + snapshots = db.get_latest_predictions() + if not snapshots: + return PredictionRegime.NORMAL, None + + war_snapshots = [s for s in snapshots if s["category"] == "war_risk"] + rate_snapshots = [s for s in snapshots if s["category"] == "rate_change"] + + market_weights = { + m.slug: m.weight + for m in config.prediction.markets + if m.category == "war_risk" + } + w_risk = war_risk_score(war_snapshots, market_weights) + r_change = rate_change_score(rate_snapshots) + + regime = compute_regime(w_risk, r_change, config.prediction) + preferred = funding_side_preference(r_change, config.prediction.rate_change_threshold) + + if regime != PredictionRegime.NORMAL: + logger.info( + "Prediction regime=%s war_risk=%.3f rate_change=%.3f preferred=%s", + regime.value, w_risk, r_change, preferred, + ) + + return regime, preferred + + +def _compute_pnl(trade: dict, current_price: float) -> float: + """Compute unrealised P&L for a trade.""" + if trade["side"] == "long": + return (current_price - trade["entry_price"]) / trade["entry_price"] * trade["size_usd"] + else: + return (trade["entry_price"] - current_price) / trade["entry_price"] * trade["size_usd"] + + +def _alert(config, message: str) -> None: + logger.info(message) + if config.discord_webhook_url: + send_discord_alert(config.discord_webhook_url, message) + if config.telegram_bot_token and config.telegram_chat_id: + send_telegram_alert( + config.telegram_bot_token, config.telegram_chat_id, message, + ) + + +def _sleep_until_next_candle(interval_ms: int) -> None: + """Sleep until the next candle boundary plus a small buffer.""" + now_ms = int(time.time() * 1000) + next_candle = ((now_ms // interval_ms) + 1) * interval_ms + sleep_secs = (next_candle - now_ms) / 1000 + 5 # 5s buffer for candle to finalise + 
logger.debug("Sleeping %.1fs until next candle", sleep_secs) + time.sleep(sleep_secs) + + +def _update_daemon_state( + daemon_state: DaemonState, + config, ws_client, risk_manager, executor, + prediction_regime_label: str, db, +) -> None: + """Refresh the daemon state after a tick for IPC clients.""" + now_ms = int(time.time() * 1000) + + # Mid prices + mid_prices = {} + if ws_client: + for sym in config.trading.symbols: + p = ws_client.get_mid_price(sym) + if p is not None: + mid_prices[sym] = p + + # Risk check + risk_check = risk_manager.check_entry() + + # Cooldown remaining + cooldown_s = 0.0 + if risk_manager._last_stop_loss_time is not None: + elapsed = now_ms - risk_manager._last_stop_loss_time + remaining = risk_manager.risk.cooldown_seconds * 1000 - elapsed + if remaining > 0: + cooldown_s = remaining / 1000 + + # Daily PnL + day_start = (int(time.time()) - int(time.time()) % 86400) * 1000 + daily_pnl = db.get_daily_pnl(day_start) + + # Slippage + slippage_stats = {"count": 0, "avg_pct": 0.0, "max_pct": 0.0} + if hasattr(executor, "get_slippage_stats"): + slippage_stats = executor.get_slippage_stats() + + daemon_state.update( + tick_count=daemon_state.tick_count + 1, + last_tick_ms=now_ms, + mid_prices=mid_prices, + ws_healthy=ws_client.is_healthy() if ws_client else False, + prediction_regime=prediction_regime_label, + risk_allowed=risk_check.allowed, + risk_reason=risk_check.reason, + cooldown_remaining_s=cooldown_s, + daily_pnl=daily_pnl, + slippage_stats=slippage_stats, + ) + + +def run_status(config_path: str | None = None) -> None: + """One-shot: connect to daemon, print formatted state, exit.""" + from perp_bot.ipc.client import DaemonClient + + config = load_config(config_path) + socket_path = get_socket_path(config.data.db_path) + client = DaemonClient(socket_path) + + if not client.is_running(): + print("Daemon is not running (socket not found or not responding)") + sys.exit(1) + + state = client.get_state() + if state is None: + print("Failed 
to get state from daemon") + sys.exit(1) + + print(json.dumps(state, indent=2)) + + +def run_tui(config_path: str | None = None) -> None: + """Launch the TUI dashboard connecting to a running daemon.""" + from perp_bot.tui.app import PerpBotApp + + config = load_config(config_path) + app = PerpBotApp(config) + app.run() + + +def run_backfill_predictions(config_path: str | None = None) -> None: + """One-shot: fetch current prediction market snapshots into SQLite.""" + config = load_config(config_path) + if not config.prediction or not config.prediction.enabled: + logger.error("Prediction markets not configured or disabled") + sys.exit(1) + + db = Database(config.data.db_path) + clients = _init_prediction_clients(config) + ingestor = DataIngestor(config, db, HyperliquidClient()) + + try: + count = ingestor.update_predictions(clients, config.prediction) + logger.info("Inserted %d prediction snapshots", count) + finally: + db.close() + + +def run_backtest(config_path: str | None = None) -> None: + """Run a backtest over all historical data.""" + config = load_config(config_path) + bt_config = config.backtest or BacktestConfig() + db = Database(config.data.db_path) + + try: + engine = BacktestEngine(config, bt_config) + for symbol in config.trading.symbols: + logger.info("Running backtest for %s", symbol) + result = engine.run(db, symbol) + print(result.summary()) + if bt_config.export_trades_csv: + result.trades_to_csv(bt_config.export_trades_csv) + logger.info("Trades exported to %s", bt_config.export_trades_csv) + finally: + db.close() + + +def run_walkforward(config_path: str | None = None) -> None: + """Run walk-forward analysis.""" + config = load_config(config_path) + bt_config = config.backtest or BacktestConfig() + db = Database(config.data.db_path) + + try: + for symbol in config.trading.symbols: + # Find data range + candles = db.get_candles(symbol, config.data.primary_timeframe, limit=1) + if not candles: + logger.warning("No data for %s", symbol) + continue 
+ first_time = candles[0]["open_time"] + latest = db.get_latest_candle_time(symbol, config.data.primary_timeframe) + if latest is None: + continue + + runner = WalkForwardRunner(config, bt_config) + result = runner.run(db, symbol, first_time, latest) + print(result.summary()) + finally: + db.close() + + +def run_sensitivity(config_path: str | None = None) -> None: + """Run parameter sensitivity analysis.""" + config = load_config(config_path) + bt_config = config.backtest or BacktestConfig() + db = Database(config.data.db_path) + + try: + analyzer = ParameterSensitivityAnalyzer(config, bt_config) + for symbol in config.trading.symbols: + logger.info("Running sensitivity analysis for %s", symbol) + report = analyzer.run(db, symbol) + print(report.summary()) + finally: + db.close() + + +def run_screen(config_path: str | None = None) -> None: + """Screen symbols for mean-reversion suitability (§4.5). + + Criteria: Hurst < 0.5, 24h volume > $50M, tight spread. + """ + from perp_bot.signals.indicators import hurst_exponent + + config = load_config(config_path) + db = Database(config.data.db_path) + client = HyperliquidClient() + + try: + meta = client.get_asset_meta() + if not meta or len(meta) < 2: + print("Failed to fetch asset metadata") + return + + universe = meta[0].get("universe", []) + asset_ctxs = meta[1] if len(meta) > 1 else [] + + # Build volume + symbol lookup + vol_map: dict[str, float] = {} + for i, asset in enumerate(universe): + name = asset["name"] + if i < len(asset_ctxs): + ctx = asset_ctxs[i] + vol_map[name] = float(ctx.get("dayNtlVlm", 0)) + + symbols = [a["name"] for a in universe] + + header = ( + f"{'Symbol':<10} {'Vol24h($M)':>12} {'Spread%':>9}" + f" {'Hurst':>8} {'Verdict'}" + ) + print(header) + print("-" * len(header)) + + for symbol in symbols: + vol_24h = vol_map.get(symbol, 0) + + # §4.5: 24h volume > $50M + if vol_24h < 50_000_000: + continue + + # Bid-ask spread + try: + l2 = client.info.l2_snapshot(symbol) + bid = 
float(l2["levels"][0][0]["px"]) + ask = float(l2["levels"][1][0]["px"]) + mid = (bid + ask) / 2 + spread_pct = (ask - bid) / mid * 100 if mid > 0 else 999 + except Exception: + spread_pct = 999.0 + + # Hurst exponent from historical candles + candles = db.get_candles( + symbol, config.data.primary_timeframe, limit=500, + ) + if len(candles) < 50: + continue + closes = pd.Series([c["close"] for c in candles]) + h = hurst_exponent(closes) + + # Verdict: must pass all §4.5 criteria + passes_hurst = h < 0.5 + passes_spread = spread_pct < 0.05 + if passes_hurst and passes_spread: + verdict = "CANDIDATE" + elif passes_hurst: + verdict = "MEAN-REV (wide spread)" + elif h > 0.55: + verdict = "TRENDING" + else: + verdict = "RANDOM" + + vol_m = vol_24h / 1_000_000 + print( + f"{symbol:<10} {vol_m:>12.1f} {spread_pct:>8.4f}%" + f" {h:>8.3f} {verdict}" + ) + finally: + db.close() + + +def run_compare( + config_path: str | None = None, days: int = 7, +) -> None: + """Compare paper trading results against backtest for the same period.""" + from perp_bot.reporting.compare import compare_paper_vs_backtest + + config = load_config(config_path) + db = Database(config.data.db_path) + now_ms = int(time.time() * 1000) + start_ms = now_ms - days * 24 * 3600 * 1000 + + try: + for symbol in config.trading.symbols: + print(compare_paper_vs_backtest( + config, db, symbol, start_ms, now_ms, + )) + print() + finally: + db.close() + + +def run_review(config_path: str | None = None, weeks: int = 1) -> None: + """Print a weekly performance review.""" + from perp_bot.reporting.weekly import generate_weekly_report + + config = load_config(config_path) + db = Database(config.data.db_path) + try: + print(generate_weekly_report(db, weeks)) + finally: + db.close() + + +def main() -> None: + setup_logging() + parser = argparse.ArgumentParser(description="Hyperliquid mean-reversion bot") + parser.add_argument( + "command", + choices=[ + "backfill", "trade", "backfill-predictions", + "backtest", 
"walkforward", "sensitivity", + "screen", "review", "compare", "tui", "status", + ], + help="Command to run", + ) + parser.add_argument("--config", default=None, help="Path to config.yaml") + parser.add_argument("--weeks", type=int, default=1, help="Weeks for review report") + parser.add_argument("--days", type=int, default=7, help="Days for compare range") + parser.add_argument("--force", action="store_true", help="Override safety halts") + args = parser.parse_args() + + if args.command == "backfill": + run_backfill(args.config) + elif args.command == "trade": + run_trading_loop(args.config) + elif args.command == "backfill-predictions": + run_backfill_predictions(args.config) + elif args.command == "backtest": + run_backtest(args.config) + elif args.command == "walkforward": + run_walkforward(args.config) + elif args.command == "sensitivity": + run_sensitivity(args.config) + elif args.command == "screen": + run_screen(args.config) + elif args.command == "review": + run_review(args.config, args.weeks) + elif args.command == "compare": + run_compare(args.config, args.days) + elif args.command == "tui": + run_tui(args.config) + elif args.command == "status": + run_status(args.config) + + +if __name__ == "__main__": + main() diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 0000000..80538ab --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,48 @@ +[project] +name = "perp-bot" +version = "0.1.0" +description = "Hyperliquid mean-reversion perpetual futures trading bot" +readme = "README.md" +requires-python = ">=3.12" +dependencies = [ + "hyperliquid-python-sdk", + "websockets", + "pandas", + "numpy", + "eth_account", + "python-dotenv", + "pyyaml", + "textual>=1.0.0", +] + +[project.optional-dependencies] +dev = [ + "pytest", + "pytest-asyncio", + "ruff", +] + +[tool.ruff] +line-length = 100 +target-version = "py312" + +[tool.ruff.lint] +select = ["E", "F", "I", "W"] + +[tool.pytest.ini_options] +testpaths = ["tests"] +asyncio_mode = "auto" + 
[build-system]
requires = ["hatchling"]
build-backend = "hatchling.build"

# NOTE(review): dev dependencies are declared both in
# [project.optional-dependencies].dev (unpinned) and in
# [dependency-groups].dev (pinned) — confirm both are intentional.
[dependency-groups]
dev = [
    "pytest>=9.0.2",
    "pytest-asyncio>=1.3.0",
    "ruff>=0.15.6",
]

[tool.hatch.build.targets.wheel]
packages = ["src/perp_bot"]
diff --git a/scratch.py b/scratch.py
new file mode 100644
index 0000000..e69de29
diff --git a/src/perp_bot/__init__.py b/src/perp_bot/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/src/perp_bot/backtest/__init__.py b/src/perp_bot/backtest/__init__.py
new file mode 100644
index 0000000..2044644
--- /dev/null
+++ b/src/perp_bot/backtest/__init__.py
@@ -0,0 +1,23 @@
"""Backtester module — historical simulation with realistic cost modelling."""

from perp_bot.backtest.config import BacktestConfig
from perp_bot.backtest.cost_model import FeeModel, FundingModel, SlippageModel
from perp_bot.backtest.engine import BacktestEngine
from perp_bot.backtest.metrics import compute_all
from perp_bot.backtest.results import BacktestResult, TradeRecord, WalkForwardResult
from perp_bot.backtest.sensitivity import ParameterSensitivityAnalyzer
from perp_bot.backtest.walk_forward import WalkForwardRunner

__all__ = [
    "BacktestConfig",
    "BacktestEngine",
    "BacktestResult",
    "FeeModel",
    "FundingModel",
    "ParameterSensitivityAnalyzer",
    "SlippageModel",
    "TradeRecord",
    "WalkForwardResult",
    "WalkForwardRunner",
    "compute_all",
]
diff --git a/src/perp_bot/backtest/config.py b/src/perp_bot/backtest/config.py
new file mode 100644
index 0000000..482d1cb
--- /dev/null
+++ b/src/perp_bot/backtest/config.py
@@ -0,0 +1,5 @@
"""Re-export BacktestConfig from the centralised config module."""

from perp_bot.config import BacktestConfig

__all__ = ["BacktestConfig"]
diff --git a/src/perp_bot/backtest/cost_model.py b/src/perp_bot/backtest/cost_model.py
new file mode 100644
index 0000000..5bf02a2
--- /dev/null
+++ b/src/perp_bot/backtest/cost_model.py
@@ -0,0 +1,90 @@
"""Cost models — fees, slippage, and funding 
for realistic backtest simulation.""" + +from __future__ import annotations + +import random + + +class FeeModel: + """Hyperliquid fee model: maker for limit orders, taker for market/stop orders.""" + + def __init__(self, maker_rate: float = 0.00015, taker_rate: float = 0.00045) -> None: + self.maker_rate = maker_rate + self.taker_rate = taker_rate + + def entry_fee(self, notional: float) -> float: + """Entry uses maker rate (limit order).""" + return notional * self.maker_rate + + def exit_fee(self, notional: float, is_stop_loss: bool = False) -> float: + """Exit uses taker rate for stop-loss (market order), maker otherwise.""" + rate = self.taker_rate if is_stop_loss else self.maker_rate + return notional * rate + + +class SlippageModel: + """Simulates random slippage that always worsens the fill price.""" + + def __init__( + self, + min_pct: float = 0.0001, + max_pct: float = 0.0005, + seed: int = 42, + ) -> None: + self.min_pct = min_pct + self.max_pct = max_pct + self.rng = random.Random(seed) + + def apply(self, price: float, side: str, is_entry: bool) -> float: + """Apply slippage to a price. Always worsens the fill. + + Long entry / short exit → price goes up (pay more / receive less). + Short entry / long exit → price goes down (receive less / pay more). + """ + slip_pct = self.rng.uniform(self.min_pct, self.max_pct) + + # Determine if slippage moves price up or down + if (side == "long" and is_entry) or (side == "short" and not is_entry): + return price * (1 + slip_pct) + else: + return price * (1 - slip_pct) + + +_HOUR_MS = 3_600_000 + + +class FundingModel: + """Accumulates Hyperliquid hourly funding costs over a position's lifetime. + + Hyperliquid settles funding every hour (1/8 of the 8h rate). + Long pays positive rate; short receives it (and vice versa for negative). 
+ """ + + def __init__(self, funding_rates: list[dict]) -> None: + # Build {hour_boundary_ms: hourly_rate} + self._rates: dict[int, float] = {} + for fr in funding_rates: + ts = fr["time"] + # Snap to hour boundary + hour_ms = (ts // _HOUR_MS) * _HOUR_MS + self._rates[hour_ms] = fr["rate"] + + def cost_between( + self, side: str, notional: float, start_ms: int, end_ms: int + ) -> float: + """Compute total funding cost for holding a position. + + Positive return = cost to the trader (negative P&L impact). + """ + total = 0.0 + # Iterate over each hourly boundary between start and end + first_hour = ((start_ms // _HOUR_MS) + 1) * _HOUR_MS + hour = first_hour + while hour <= end_ms: + rate = self._rates.get(hour, 0.0) + if side == "long": + total += rate * notional + else: + total -= rate * notional + hour += _HOUR_MS + return total diff --git a/src/perp_bot/backtest/engine.py b/src/perp_bot/backtest/engine.py new file mode 100644 index 0000000..950dac1 --- /dev/null +++ b/src/perp_bot/backtest/engine.py @@ -0,0 +1,221 @@ +"""Core backtest engine — row-by-row simulation over historical candles.""" + +from __future__ import annotations + +import logging + +import pandas as pd + +from perp_bot.backtest.config import BacktestConfig +from perp_bot.backtest.cost_model import FeeModel, FundingModel, SlippageModel +from perp_bot.backtest.executor import BacktestExecutor +from perp_bot.backtest.metrics import compute_all +from perp_bot.backtest.results import BacktestResult +from perp_bot.backtest.risk_adapter import BacktestRiskManager +from perp_bot.config import BotConfig +from perp_bot.data.db import Database +from perp_bot.signals.engine import Signal, SignalEngine +from perp_bot.signals.prediction import PredictionRegime + +logger = logging.getLogger(__name__) + + +class BacktestEngine: + """Runs a historical simulation of the mean-reversion strategy.""" + + def __init__(self, bot_config: BotConfig, bt_config: BacktestConfig) -> None: + self.bot_config = bot_config + 
        self.bt_config = bt_config
        self.signal_engine = SignalEngine(bot_config.signals)

    def run(
        self,
        db: Database,
        symbol: str,
        start_time_ms: int | None = None,
        end_time_ms: int | None = None,
        prediction_regime: PredictionRegime = PredictionRegime.NORMAL,
    ) -> BacktestResult:
        """Execute a full backtest over the specified time range.

        Args:
            db: Database with historical candles and funding rates.
            symbol: Trading pair to backtest.
            start_time_ms: Start of backtest window (None = all data).
            end_time_ms: End of backtest window (None = all data).
            prediction_regime: Fixed regime for the entire backtest.
        """
        bt = self.bt_config
        cfg = self.bot_config

        # --- Load data ---
        candles = db.get_candles(
            symbol, cfg.data.primary_timeframe, start_time=start_time_ms, limit=100_000
        )
        if not candles:
            logger.warning("No candles found for %s", symbol)
            return BacktestResult()

        df = pd.DataFrame(candles)

        # Filter by end_time if specified
        if end_time_ms is not None:
            df = df[df["open_time"] <= end_time_ms].reset_index(drop=True)
            if df.empty:
                return BacktestResult()

        # Compute indicators on the full DataFrame once
        df = self.signal_engine.compute_indicators(df)

        # Load funding rates for the period
        data_start = int(df.iloc[0]["open_time"])
        data_end = int(df.iloc[-1]["open_time"])
        funding_rates = db.get_funding_rates(symbol, data_start, data_end)

        # --- Build components ---
        fee_model = FeeModel(bt.maker_fee_rate, bt.taker_fee_rate)
        slippage_model = SlippageModel(bt.slippage_min_pct, bt.slippage_max_pct, bt.seed)
        funding_model = FundingModel(funding_rates)
        executor = BacktestExecutor(fee_model, slippage_model, funding_model)
        risk = BacktestRiskManager(cfg)

        # --- Determine warmup ---
        sc = cfg.signals
        warmup = max(sc.zscore_lookback, sc.bollinger_period, sc.rsi_period, sc.adx_period * 2)

        # --- Row-by-row simulation ---
        equity_curve: list[float] = []
        capital = cfg.trading.capital_usd
        closed_pnl = 0.0
        pending_order: _PendingOrder | None = None

        for i in range(warmup, len(df)):
            row = df.iloc[i]
            time_ms = int(row["open_time"])
            price = float(row["close"])

            # --- Fill pending order ---
            # A pending order is consumed on its fill candle whether or not
            # the fill actually happens (sizing/risk may veto it).
            if pending_order is not None and i >= pending_order.fill_index:
                if bt.cancel_if_signal_gone:
                    # Re-evaluate signal at fill time
                    check_result = self.signal_engine.evaluate(
                        df.iloc[: i + 1],
                        position_side=None,
                        prediction_regime=prediction_regime,
                    )
                    if check_result.signal != pending_order.signal:
                        pending_order = None  # cancel — signal disappeared
                    else:
                        size = risk.compute_position_size(prediction_regime)
                        if size > 0 and risk.check_entry(time_ms):
                            executor.open_position(
                                symbol, pending_order.signal.value,
                                size, price, time_ms,
                            )
                        pending_order = None
                else:
                    size = risk.compute_position_size(prediction_regime)
                    if size > 0 and risk.check_entry(time_ms):
                        executor.open_position(
                            symbol, pending_order.signal.value,
                            size, price, time_ms,
                        )
                    pending_order = None

            # --- Manage open position ---
            # Exit priority: capital stop loss, then timeout, then signal.
            if executor.has_position:
                t = executor.open_trade
                # Capital-based stop loss
                if risk.check_stop_loss(t.entry_price, price, t.side, t.size_usd):
                    record = executor.close_position(
                        price, time_ms, "capital_stop_loss", is_stop_loss=True
                    )
                    risk.record_trade_close(record.net_pnl, time_ms, is_stop_loss=True)
                    closed_pnl += record.net_pnl
                # Position timeout
                elif risk.check_position_timeout(t.entry_time_ms, time_ms):
                    record = executor.close_position(price, time_ms, "timeout_24h")
                    risk.record_trade_close(record.net_pnl, time_ms)
                    closed_pnl += record.net_pnl
                else:
                    # Signal-based exit
                    sig_result = self.signal_engine.evaluate(
                        df.iloc[: i + 1],
                        position_side=t.side,
                        prediction_regime=prediction_regime,
                    )
                    if sig_result.signal == Signal.CLOSE:
                        # A z-score stop exit is charged taker fees like a stop.
                        is_stop = "zscore_stop" in sig_result.reason
                        record = executor.close_position(
                            price, time_ms, sig_result.reason, is_stop_loss=is_stop
                        )
                        risk.record_trade_close(record.net_pnl, time_ms, is_stop_loss=is_stop)
                        closed_pnl += record.net_pnl

            # --- Entry signals (only if no position and no pending order) ---
            if not executor.has_position and pending_order is None:
                sig_result = self.signal_engine.evaluate(
                    df.iloc[: i + 1],
                    position_side=None,
                    prediction_regime=prediction_regime,
                )
                if sig_result.signal in (Signal.LONG, Signal.SHORT):
                    if risk.check_entry(time_ms):
                        if bt.entry_delay_candles == 0:
                            # Immediate fill at this candle's close.
                            size = risk.compute_position_size(prediction_regime)
                            if size > 0:
                                executor.open_position(
                                    symbol, sig_result.signal.value,
                                    size, price, time_ms,
                                )
                        else:
                            # Defer the fill by the configured candle delay.
                            pending_order = _PendingOrder(
                                signal=sig_result.signal,
                                fill_index=i + bt.entry_delay_candles,
                            )

            # --- Equity curve ---
            unrealised = 0.0
            if executor.has_position:
                t = executor.open_trade
                if t.side == "long":
                    unrealised = (price - t.entry_price) / t.entry_price * t.size_usd
                else:
                    unrealised = (t.entry_price - price) / t.entry_price * t.size_usd
            equity_curve.append(capital + closed_pnl + unrealised)

        # --- Force-close any open position at end ---
        if executor.has_position:
            last_price = float(df.iloc[-1]["close"])
            last_time = int(df.iloc[-1]["open_time"])
            record = executor.close_position(last_price, last_time, "backtest_end")
            risk.record_trade_close(record.net_pnl, last_time)
            closed_pnl += record.net_pnl
            # Update last equity point
            if equity_curve:
                equity_curve[-1] = capital + closed_pnl

        # --- Build result ---
        metrics = compute_all(executor.trades, equity_curve, capital)
        result = BacktestResult(
            trades=executor.trades,
            equity_curve=equity_curve,
            metrics=metrics,
            initial_capital=capital,
            start_time_ms=data_start,
            end_time_ms=data_end,
        )

        if bt.export_trades_csv:
            result.trades_to_csv(bt.export_trades_csv)

        return result


class _PendingOrder:
    """A pending order waiting for execution delay to elapse."""
    __slots__ = ("signal", "fill_index")

    def
"""In-memory backtest executor — no database, no real orders."""

from __future__ import annotations

from perp_bot.backtest.cost_model import FeeModel, FundingModel, SlippageModel
from perp_bot.backtest.results import TradeRecord


class BacktestExecutor:
    """Executes trades in-memory with slippage and fee application.

    Holds at most one open position at a time; completed trades accumulate
    in ``trades`` as fully-costed :class:`TradeRecord` objects.
    """

    def __init__(
        self,
        fee_model: FeeModel,
        slippage_model: SlippageModel,
        funding_model: FundingModel,
    ) -> None:
        self.fee_model = fee_model
        self.slippage_model = slippage_model
        self.funding_model = funding_model
        self.trades: list[TradeRecord] = []
        self.open_trade: _OpenTrade | None = None
        self._next_id = 1  # monotonically increasing trade id

    def open_position(
        self, symbol: str, side: str, size_usd: float, price: float, time_ms: int
    ) -> None:
        """Open a new position with slippage and entry fee.

        ``price`` is the raw candle price; the stored entry price has entry
        slippage applied. NOTE(review): an existing open trade is silently
        overwritten — callers are expected to check ``has_position`` first.
        """
        filled_price = self.slippage_model.apply(price, side, is_entry=True)
        entry_fee = self.fee_model.entry_fee(size_usd)
        self.open_trade = _OpenTrade(
            id=self._next_id,
            symbol=symbol,
            side=side,
            size_usd=size_usd,
            entry_time_ms=time_ms,
            entry_price=filled_price,
            raw_entry_price=price,
            entry_fee=entry_fee,
        )
        self._next_id += 1

    def close_position(
        self, price: float, time_ms: int, reason: str, is_stop_loss: bool = False
    ) -> TradeRecord:
        """Close the open position and return the completed TradeRecord.

        Raises:
            RuntimeError: if there is no open position to close.
        """
        t = self.open_trade
        if t is None:
            # Explicit raise instead of `assert`: assertions are stripped
            # under `python -O`, which would demote this invariant check to
            # an opaque AttributeError further down.
            raise RuntimeError("No open position to close")

        filled_price = self.slippage_model.apply(price, t.side, is_entry=False)
        exit_fee = self.fee_model.exit_fee(t.size_usd, is_stop_loss=is_stop_loss)

        # Raw P&L from price movement (sign depends on direction)
        if t.side == "long":
            pnl = (filled_price - t.entry_price) / t.entry_price * t.size_usd
        else:
            pnl = (t.entry_price - filled_price) / t.entry_price * t.size_usd

        # Funding cost over holding period
        funding_cost = self.funding_model.cost_between(
            t.side, t.size_usd, t.entry_time_ms, time_ms
        )

        net_pnl = pnl - t.entry_fee - exit_fee - funding_cost

        record = TradeRecord(
            id=t.id,
            symbol=t.symbol,
            side=t.side,
            entry_time_ms=t.entry_time_ms,
            exit_time_ms=time_ms,
            entry_price=t.entry_price,
            exit_price=filled_price,
            raw_entry_price=t.raw_entry_price,
            raw_exit_price=price,
            size_usd=t.size_usd,
            pnl=pnl,
            entry_fee=t.entry_fee,
            exit_fee=exit_fee,
            funding_cost=funding_cost,
            net_pnl=net_pnl,
            exit_reason=reason,
        )
        self.trades.append(record)
        self.open_trade = None
        return record

    @property
    def has_position(self) -> bool:
        """True while a trade is currently open."""
        return self.open_trade is not None


class _OpenTrade:
    """Lightweight container for an in-flight trade."""

    __slots__ = (
        "id", "symbol", "side", "size_usd",
        "entry_time_ms", "entry_price", "raw_entry_price", "entry_fee",
    )

    def __init__(
        self, id: int, symbol: str, side: str, size_usd: float,
        entry_time_ms: int, entry_price: float, raw_entry_price: float,
        entry_fee: float,
    ) -> None:
        self.id = id
        self.symbol = symbol
        self.side = side
        self.size_usd = size_usd
        self.entry_time_ms = entry_time_ms
        self.entry_price = entry_price
        self.raw_entry_price = raw_entry_price
        self.entry_fee = entry_fee
def win_rate(trades: list[TradeRecord]) -> float:
    """Fraction of trades with positive net P&L."""
    if not trades:
        return 0.0
    winners = [t for t in trades if t.net_pnl > 0]
    return len(winners) / len(trades)


def expected_value(trades: list[TradeRecord]) -> float:
    """Average net P&L per trade."""
    return sum(t.net_pnl for t in trades) / len(trades) if trades else 0.0


def profit_factor(trades: list[TradeRecord]) -> float:
    """Gross wins / gross losses. Returns inf if no losses."""
    wins = 0.0
    losses = 0.0
    for t in trades:
        if t.net_pnl > 0:
            wins += t.net_pnl
        elif t.net_pnl < 0:
            losses += -t.net_pnl
    if losses == 0:
        return float("inf") if wins > 0 else 0.0
    return wins / losses


def sharpe_ratio(equity_curve: list[float], annualization_factor: float = 252.0) -> float:
    """Annualised Sharpe ratio from an equity curve.

    Computes per-step returns from consecutive equity values.
    NOTE(review): annualization assumes one equity point per day — the
    engine appends one point per candle, so confirm the factor upstream.
    """
    if len(equity_curve) < 2:
        return 0.0
    # Per-step simple returns; steps starting at zero equity are skipped.
    returns = []
    for prev, cur in zip(equity_curve, equity_curve[1:]):
        if prev != 0:
            returns.append((cur - prev) / prev)
    n = len(returns)
    if n < 2:
        return 0.0
    mean_r = sum(returns) / n
    # Sample variance (n - 1 denominator).
    variance = sum((r - mean_r) ** 2 for r in returns) / (n - 1)
    std_r = math.sqrt(variance)
    if std_r == 0:
        return 0.0
    return (mean_r / std_r) * math.sqrt(annualization_factor)


def max_drawdown(equity_curve: list[float]) -> tuple[float, float]:
    """Returns (max_drawdown_pct, max_drawdown_usd) from peak-to-trough."""
    if len(equity_curve) < 2:
        return 0.0, 0.0
    peak = equity_curve[0]
    worst_pct = 0.0
    worst_usd = 0.0
    for value in equity_curve:
        peak = max(peak, value)
        dd_usd = peak - value
        dd_pct = dd_usd / peak if peak > 0 else 0.0
        if dd_pct > worst_pct:
            worst_pct = dd_pct
            worst_usd = dd_usd
    return worst_pct, worst_usd


def calmar_ratio(
    total_return_pct: float, max_dd_pct: float, period_years: float
) -> float:
    """Calmar ratio = annualised return / max drawdown."""
    if max_dd_pct == 0 or period_years == 0:
        return 0.0
    return (total_return_pct / period_years) / max_dd_pct


def avg_holding_hours(trades: list[TradeRecord]) -> float:
    """Average holding time across all trades, in hours."""
    if not trades:
        return 0.0
    total_ms = sum(t.exit_time_ms - t.entry_time_ms for t in trades)
    return total_ms / len(trades) / 3_600_000


def compute_all(
    trades: list[TradeRecord],
    equity_curve: list[float],
    initial_capital: float,
) -> dict:
    """Compute all performance metrics and return as a dictionary."""
    total_net_pnl = sum(t.net_pnl for t in trades)
    total_fees = sum(t.entry_fee + t.exit_fee for t in trades)
    total_funding = sum(t.funding_cost for t in trades)

    # Slippage cost per trade: entry leg + exit leg, both vs. raw prices.
    total_slippage = 0.0
    for t in trades:
        entry_slip = abs(t.entry_price - t.raw_entry_price) / t.raw_entry_price * t.size_usd
        exit_slip = abs(t.exit_price - t.raw_exit_price) / t.raw_exit_price * t.size_usd
        total_slippage += entry_slip + exit_slip

    dd_pct, dd_usd = max_drawdown(equity_curve)

    # Period duration for Calmar (one equity point per period step).
    if len(equity_curve) > 1:
        period_years = (len(equity_curve) - 1) / 365.0
        total_return_pct = (
            (equity_curve[-1] - initial_capital) / initial_capital if initial_capital > 0 else 0.0
        )
    else:
        period_years = 0.0
        total_return_pct = 0.0

    return {
        "total_trades": len(trades),
        "win_rate": win_rate(trades),
        "expected_value": expected_value(trades),
        "profit_factor": profit_factor(trades),
        "sharpe_ratio": sharpe_ratio(equity_curve),
        "max_drawdown_pct": dd_pct,
        "max_drawdown_usd": dd_usd,
        "calmar_ratio": calmar_ratio(total_return_pct, dd_pct, period_years),
        "avg_holding_hours": avg_holding_hours(trades),
        "total_net_pnl": total_net_pnl,
        "total_fees": total_fees,
        "total_funding": total_funding,
        "total_slippage": total_slippage,
        "total_return_pct": total_return_pct,
    }
"""Result dataclasses for backtest output."""

from __future__ import annotations

import csv
from dataclasses import dataclass, field


@dataclass
class TradeRecord:
    """A single completed backtest trade with full cost breakdown."""

    id: int
    symbol: str
    side: str  # "long" | "short"
    entry_time_ms: int
    exit_time_ms: int
    entry_price: float  # after slippage
    exit_price: float  # after slippage
    raw_entry_price: float  # pre-slippage
    raw_exit_price: float  # pre-slippage
    size_usd: float
    pnl: float  # raw P&L before costs
    entry_fee: float
    exit_fee: float
    funding_cost: float
    net_pnl: float  # pnl - entry_fee - exit_fee - funding_cost
    exit_reason: str


@dataclass
class BacktestResult:
    """Aggregate result from a single backtest run."""

    trades: list[TradeRecord] = field(default_factory=list)
    equity_curve: list[float] = field(default_factory=list)
    metrics: dict = field(default_factory=dict)
    initial_capital: float = 0.0
    start_time_ms: int = 0
    end_time_ms: int = 0

    def summary(self) -> str:
        """Human-readable performance summary."""
        m = self.metrics
        report = [
            "═══ Backtest Summary ═══",
            f" Period: {self.start_time_ms} → {self.end_time_ms}",
            f" Trades: {m.get('total_trades', 0)}",
            f" Win rate: {m.get('win_rate', 0):.1%}",
            f" Net P&L: ${m.get('total_net_pnl', 0):.2f}",
            f" Expected value: ${m.get('expected_value', 0):.2f}/trade",
            f" Profit factor: {m.get('profit_factor', 0):.2f}",
            f" Sharpe ratio: {m.get('sharpe_ratio', 0):.2f}",
            f" Max drawdown: {m.get('max_drawdown_pct', 0):.1%}"
            f" (${m.get('max_drawdown_usd', 0):.2f})",
            f" Calmar ratio: {m.get('calmar_ratio', 0):.2f}",
            f" Avg holding: {m.get('avg_holding_hours', 0):.1f}h",
            f" Total fees: ${m.get('total_fees', 0):.2f}",
            f" Total funding: ${m.get('total_funding', 0):.2f}",
            f" Total slippage: ${m.get('total_slippage', 0):.2f}",
            "════════════════════════",
        ]
        return "\n".join(report)

    def trades_to_csv(self, path: str) -> None:
        """Export trade records to CSV. No-op when there are no trades."""
        if not self.trades:
            return
        columns = [
            "id", "symbol", "side", "entry_time_ms", "exit_time_ms",
            "entry_price", "exit_price", "raw_entry_price", "raw_exit_price",
            "size_usd", "pnl", "entry_fee", "exit_fee", "funding_cost",
            "net_pnl", "exit_reason",
        ]
        with open(path, "w", newline="") as fh:
            writer = csv.writer(fh)
            writer.writerow(columns)
            for trade in self.trades:
                writer.writerow([getattr(trade, col) for col in columns])


@dataclass
class WalkForwardWindow:
    """One train/test window pair."""

    train_start_ms: int
    train_end_ms: int
    test_start_ms: int
    test_end_ms: int
    train_result: BacktestResult
    test_result: BacktestResult


@dataclass
class WalkForwardResult:
    """Aggregate walk-forward analysis result."""

    windows: list[WalkForwardWindow] = field(default_factory=list)
    overfitting_score: float = 0.0

    def summary(self) -> str:
        """One line per window plus the aggregate overfitting score."""
        lines = ["═══ Walk-Forward Analysis ═══"]
        for idx, win in enumerate(self.windows, start=1):
            train_m = win.train_result.metrics
            test_m = win.test_result.metrics
            lines.append(
                f" Window {idx}: "
                f"Train Sharpe={train_m.get('sharpe_ratio', 0):.2f} "
                f"Test Sharpe={test_m.get('sharpe_ratio', 0):.2f} "
                f"Train trades={train_m.get('total_trades', 0)} "
                f"Test trades={test_m.get('total_trades', 0)}"
            )
        lines.append(f" Overfitting score: {self.overfitting_score:.2f} (0=good, 1=severe)")
        lines.append("═════════════════════════════")
        return "\n".join(lines)
_DAY_MS = 86_400_000


class BacktestRiskManager:
    """Reimplements core risk checks with injectable time for deterministic backtesting."""

    def __init__(self, config: BotConfig) -> None:
        self.risk = config.risk
        self.trading = config.trading
        self.prediction = config.prediction
        self._last_stop_loss_time_ms: int | None = None  # set on stop-loss exits
        self._daily_pnl: float = 0.0  # realised P&L accumulated since day start
        self._current_day: int = 0  # epoch-day boundary tracker

    def _roll_day(self, time_ms: int) -> None:
        # Reset the daily P&L accumulator whenever a UTC day boundary is crossed.
        day = time_ms // _DAY_MS
        if day != self._current_day:
            self._daily_pnl = 0.0
            self._current_day = day

    def check_entry(self, current_time_ms: int) -> bool:
        """Check whether a new entry is allowed at the given time."""
        self._roll_day(current_time_ms)

        # Daily loss limit: refuse entries once today's realised loss is too deep.
        loss_cap = self.trading.capital_usd * self.risk.daily_loss_limit_pct
        if self._daily_pnl <= -loss_cap:
            return False

        # Cooldown window after the most recent stop-loss exit.
        if self._last_stop_loss_time_ms is not None:
            since_stop = current_time_ms - self._last_stop_loss_time_ms
            if since_stop < self.risk.cooldown_seconds * 1000:
                return False

        return True

    def check_stop_loss(
        self, entry_price: float, current_price: float, side: str, size_usd: float
    ) -> bool:
        """Check if the capital-based stop-loss threshold is breached."""
        direction = 1.0 if side == "long" else -1.0
        pnl = direction * (current_price - entry_price) / entry_price * size_usd
        loss_cap = self.trading.capital_usd * self.risk.max_loss_per_trade_pct
        return pnl <= -loss_cap

    def check_position_timeout(self, entry_time_ms: int, current_time_ms: int) -> bool:
        """Check if position has exceeded maximum holding time."""
        held_hours = (current_time_ms - entry_time_ms) / 3_600_000
        return held_hours >= self.risk.position_timeout_hours

    def compute_position_size(
        self, prediction_regime: PredictionRegime = PredictionRegime.NORMAL
    ) -> float:
        """Compute position size in USD, regime-adjusted."""
        if prediction_regime == PredictionRegime.CRISIS:
            return 0.0  # stand aside entirely during a crisis regime
        max_margin = self.trading.capital_usd * self.trading.margin_usage_limit
        size = max_margin * self.trading.leverage
        if prediction_regime == PredictionRegime.HIGH_RISK and self.prediction:
            size *= self.prediction.position_size_reduction
        return size

    def record_trade_close(
        self, pnl: float, exit_time_ms: int, is_stop_loss: bool = False
    ) -> None:
        """Update accumulators after a trade closes."""
        self._roll_day(exit_time_ms)
        self._daily_pnl += pnl

        if is_stop_loss:
            self._last_stop_loss_time_ms = exit_time_ms
class ParameterResult:
    """Result for a single parameter value."""

    __slots__ = ("value", "sharpe", "net_pnl", "win_rate", "num_trades", "max_dd_pct")

    def __init__(
        self, value, sharpe: float, net_pnl: float, win_rate: float,
        num_trades: int, max_dd_pct: float,
    ) -> None:
        self.value = value  # the swept parameter value itself
        self.sharpe = sharpe
        self.net_pnl = net_pnl
        self.win_rate = win_rate
        self.num_trades = num_trades
        self.max_dd_pct = max_dd_pct


class SensitivityReport:
    """Aggregate sensitivity analysis results."""

    def __init__(self, results: dict[str, list[ParameterResult]]) -> None:
        self.results = results

    def summary(self) -> str:
        """Render every sweep as a table and flag each parameter's robustness."""
        out = ["═══ Sensitivity Analysis ═══"]
        for param, sweep in self.results.items():
            out.append(f"\n {param}:")
            for res in sweep:
                flag = "✓" if res.net_pnl > 0 else "✗"
                out.append(
                    f" {res.value:>8} → Sharpe={res.sharpe:+.2f} "
                    f"PnL=${res.net_pnl:+.2f} WR={res.win_rate:.0%} "
                    f"trades={res.num_trades} DD={res.max_dd_pct:.1%} [{flag}]"
                )
            # Robustness check: ≥70% of swept values must stay profitable.
            winners = sum(1 for res in sweep if res.net_pnl > 0)
            share = winners / len(sweep) if sweep else 0
            verdict = "ROBUST" if share >= 0.7 else "FRAGILE"
            out.append(
                f" → {winners}/{len(sweep)} profitable ({share:.0%}) — {verdict}"
            )
        out.append("\n═════════════════════════════")
        return "\n".join(out)
analysis over the data range.""" + bt = self.bt_config + train_ms = bt.train_days * _DAY_MS + test_ms = bt.test_days * _DAY_MS + step_ms = bt.step_days * _DAY_MS + + windows: list[WalkForwardWindow] = [] + window_start = data_start_ms + + while window_start + train_ms + test_ms <= data_end_ms: + train_start = window_start + train_end = train_start + train_ms + test_start = train_end + test_end = test_start + test_ms + + logger.info( + "Walk-forward window %d: train=%d→%d, test=%d→%d", + len(windows) + 1, train_start, train_end, test_start, test_end, + ) + + train_result = self.engine.run( + db, symbol, train_start, train_end, prediction_regime + ) + test_result = self.engine.run( + db, symbol, test_start, test_end, prediction_regime + ) + + windows.append(WalkForwardWindow( + train_start_ms=train_start, + train_end_ms=train_end, + test_start_ms=test_start, + test_end_ms=test_end, + train_result=train_result, + test_result=test_result, + )) + + window_start += step_ms + + # Compute overfitting score + overfitting_score = _compute_overfitting_score(windows) + + return WalkForwardResult(windows=windows, overfitting_score=overfitting_score) + + +def _compute_overfitting_score(windows: list[WalkForwardWindow]) -> float: + """Overfitting score = 1 - (avg_oos_sharpe / avg_is_sharpe). + + 0 = no overfitting (OOS matches IS), 1 = severe (OOS is zero). + Clamped to [0, 1]. 
+ """ + if not windows: + return 0.0 + + is_sharpes = [w.train_result.metrics.get("sharpe_ratio", 0.0) for w in windows] + oos_sharpes = [w.test_result.metrics.get("sharpe_ratio", 0.0) for w in windows] + + avg_is = sum(is_sharpes) / len(is_sharpes) if is_sharpes else 0.0 + avg_oos = sum(oos_sharpes) / len(oos_sharpes) if oos_sharpes else 0.0 + + if avg_is <= 0: + return 0.0 + + score = 1.0 - (avg_oos / avg_is) + return max(0.0, min(1.0, score)) diff --git a/src/perp_bot/config.py b/src/perp_bot/config.py new file mode 100644 index 0000000..70d9daa --- /dev/null +++ b/src/perp_bot/config.py @@ -0,0 +1,174 @@ +"""Centralised configuration — loads YAML config + .env secrets.""" + +from __future__ import annotations + +import os +from dataclasses import dataclass, field +from pathlib import Path + +import yaml +from dotenv import load_dotenv + +_PROJECT_ROOT = Path(__file__).resolve().parent.parent.parent + + +@dataclass(frozen=True) +class TradingConfig: + symbols: list[str] + leverage: int + capital_usd: float + margin_usage_limit: float + + +@dataclass(frozen=True) +class SignalConfig: + zscore_lookback: int + zscore_entry_threshold: float + zscore_exit_threshold: float + zscore_stop_threshold: float + bollinger_period: int + bollinger_std: float + rsi_period: int + rsi_overbought: int + rsi_oversold: int + adx_period: int + adx_threshold: int + + +@dataclass(frozen=True) +class RiskConfig: + max_loss_per_trade_pct: float + daily_loss_limit_pct: float + max_positions: int + cooldown_seconds: int + position_timeout_hours: int + + +@dataclass(frozen=True) +class DataConfig: + timeframes: list[str] + primary_timeframe: str + history_days: int + db_path: str + + +@dataclass(frozen=True) +class ExecutionConfig: + order_type: str + taker_fallback_seconds: int + use_server_side_stop: bool + + +@dataclass(frozen=True) +class PredictionMarketDef: + slug: str + source: str # "polymarket" | "kalshi" + market_id: str + category: str # "war_risk" | "rate_change" + weight: float 
@dataclass(frozen=True)
class PredictionConfig:
    """Prediction-market regime settings: thresholds, polling, and markets."""

    enabled: bool
    poll_interval_minutes: int
    war_risk_threshold: float
    war_risk_crisis_threshold: float
    rate_change_threshold: float
    position_size_reduction: float
    markets: list[PredictionMarketDef]


@dataclass(frozen=True)
class BacktestConfig:
    """All parameters controlling backtest behaviour."""

    maker_fee_rate: float = 0.00015
    taker_fee_rate: float = 0.00045
    slippage_min_pct: float = 0.0001
    slippage_max_pct: float = 0.0005
    entry_delay_candles: int = 1
    cancel_if_signal_gone: bool = True
    train_days: int = 60
    test_days: int = 15
    step_days: int = 15
    export_trades_csv: str | None = None
    seed: int = 42


@dataclass(frozen=True)
class BotConfig:
    """Top-level configuration: sub-configs plus env-sourced secrets."""

    trading: TradingConfig
    signals: SignalConfig
    risk: RiskConfig
    data: DataConfig
    execution: ExecutionConfig
    mode: str  # "paper" | "live"
    prediction: PredictionConfig | None = None
    backtest: BacktestConfig | None = None

    # Secrets from env
    hl_private_key: str = field(default="", repr=False)
    hl_wallet_address: str = ""
    discord_webhook_url: str = ""
    telegram_bot_token: str = field(default="", repr=False)
    telegram_chat_id: str = ""


def _parse_prediction(section: dict) -> PredictionConfig:
    """Build PredictionConfig from the raw `prediction:` YAML section."""
    markets = [PredictionMarketDef(**m) for m in section.get("markets", [])]
    return PredictionConfig(
        enabled=section.get("enabled", False),
        poll_interval_minutes=section.get("poll_interval_minutes", 15),
        war_risk_threshold=section.get("war_risk_threshold", 0.4),
        war_risk_crisis_threshold=section.get("war_risk_crisis_threshold", 0.7),
        rate_change_threshold=section.get("rate_change_threshold", 0.3),
        position_size_reduction=section.get("position_size_reduction", 0.5),
        markets=markets,
    )


def _parse_backtest(section: dict) -> BacktestConfig:
    """Build BacktestConfig from the raw `backtest:` YAML section."""
    return BacktestConfig(
        maker_fee_rate=section.get("maker_fee_rate", 0.00015),
        taker_fee_rate=section.get("taker_fee_rate", 0.00045),
        slippage_min_pct=section.get("slippage_min_pct", 0.0001),
        slippage_max_pct=section.get("slippage_max_pct", 0.0005),
        entry_delay_candles=section.get("entry_delay_candles", 1),
        cancel_if_signal_gone=section.get("cancel_if_signal_gone", True),
        train_days=section.get("train_days", 60),
        test_days=section.get("test_days", 15),
        step_days=section.get("step_days", 15),
        export_trades_csv=section.get("export_trades_csv"),
        seed=section.get("seed", 42),
    )


def load_config(config_path: str | Path | None = None) -> BotConfig:
    """Load config from YAML file + environment variables.

    Secrets (keys, webhook URLs) come from `.env` in the project root;
    everything else from `config.yaml` (or the path given).
    """
    load_dotenv(_PROJECT_ROOT / ".env")

    if config_path is None:
        config_path = _PROJECT_ROOT / "config.yaml"
    path = Path(config_path)

    with open(path) as f:
        raw = yaml.safe_load(f)

    prediction = _parse_prediction(raw["prediction"]) if "prediction" in raw else None
    backtest = _parse_backtest(raw["backtest"]) if "backtest" in raw else None

    return BotConfig(
        trading=TradingConfig(**raw["trading"]),
        signals=SignalConfig(**raw["signals"]),
        risk=RiskConfig(**raw["risk"]),
        data=DataConfig(**raw["data"]),
        execution=ExecutionConfig(**raw["execution"]),
        mode=raw.get("mode", "paper"),
        prediction=prediction,
        backtest=backtest,
        hl_private_key=os.getenv("HL_PRIVATE_KEY", ""),
        hl_wallet_address=os.getenv("HL_WALLET_ADDRESS", ""),
        discord_webhook_url=os.getenv("DISCORD_WEBHOOK_URL", ""),
        telegram_bot_token=os.getenv("TELEGRAM_BOT_TOKEN", ""),
        telegram_chat_id=os.getenv("TELEGRAM_CHAT_ID", ""),
    )
# Hyperliquid's candleSnapshot API uses the same interval strings as our
# internal timeframe labels, so the map is currently the identity.
INTERVAL_MAP = {tf: tf for tf in ("1m", "5m", "15m", "1h", "4h", "1d")}

# Milliseconds per interval for pagination
_MINUTE_MS = 60_000
INTERVAL_MS = {
    "1m": _MINUTE_MS,
    "5m": 5 * _MINUTE_MS,
    "15m": 15 * _MINUTE_MS,
    "1h": 60 * _MINUTE_MS,
    "4h": 240 * _MINUTE_MS,
    "1d": 1440 * _MINUTE_MS,
}

# Max candles per API request
MAX_CANDLES_PER_REQUEST = 5000


class HyperliquidClient:
    """Thin REST wrapper around the Hyperliquid `Info` client."""

    def __init__(self, base_url: str | None = None) -> None:
        # skip_ws: this wrapper only issues REST calls, no websocket.
        self.info = Info(base_url or constants.MAINNET_API_URL, skip_ws=True)

    def fetch_candles(
        self,
        symbol: str,
        interval: str,
        start_time: int,
        end_time: int | None = None,
    ) -> list[dict]:
        """Fetch OHLCV candles, paginating if the range exceeds one request.

        Returns list of dicts with keys:
        symbol, timeframe, open_time, open, high, low, close, volume, num_trades
        """
        if end_time is None:
            end_time = int(time.time() * 1000)

        hl_interval = INTERVAL_MAP.get(interval, interval)
        step_ms = INTERVAL_MS[interval]
        candles: list[dict] = []
        cursor = start_time

        while cursor < end_time:
            batch = self.info.candles_snapshot(symbol, hl_interval, cursor, end_time)
            if not batch:
                break

            candles.extend(
                {
                    "symbol": symbol,
                    "timeframe": interval,
                    "open_time": c["t"],
                    "open": float(c["o"]),
                    "high": float(c["h"]),
                    "low": float(c["l"]),
                    "close": float(c["c"]),
                    "volume": float(c["v"]),
                    "num_trades": int(c["n"]),
                }
                for c in batch
            )

            # Advance cursor just past the newest candle in this batch.
            cursor = batch[-1]["t"] + step_ms

            logger.debug(
                "Fetched %d candles for %s %s, cursor now %d",
                len(batch), symbol, interval, cursor,
            )

        return candles

    def fetch_funding_history(
        self,
        symbol: str,
        start_time: int,
        end_time: int | None = None,
    ) -> list[dict]:
        """Fetch funding rate history.

        Returns list of dicts: symbol, time, rate, premium
        """
        if end_time is None:
            end_time = int(time.time() * 1000)

        history = self.info.funding_history(symbol, start_time, end_time)
        out: list[dict] = []
        for entry in history:
            out.append({
                "symbol": symbol,
                "time": entry["time"],
                "rate": float(entry["fundingRate"]),
                "premium": float(entry.get("premium", 0)),
            })
        return out

    def get_mid_price(self, symbol: str) -> float:
        """Get current mid price from the L2 order book."""
        book = self.info.l2_snapshot(symbol)
        best_bid = float(book["levels"][0][0]["px"])
        best_ask = float(book["levels"][1][0]["px"])
        return 0.5 * (best_bid + best_ask)

    def get_asset_meta(self) -> list[dict]:
        """Get metadata for all perpetual assets."""
        # NOTE(review): return annotation says list[dict] but the SDK call's
        # exact shape isn't visible here — confirm against the SDK.
        return self.info.meta_and_asset_ctxs()
NOT EXISTS trades ( + id INTEGER PRIMARY KEY AUTOINCREMENT, + symbol TEXT NOT NULL, + side TEXT NOT NULL, -- "long" | "short" + entry_time INTEGER NOT NULL, + entry_price REAL NOT NULL, + exit_time INTEGER, + exit_price REAL, + size_usd REAL NOT NULL, + pnl REAL, + exit_reason TEXT, + is_paper INTEGER NOT NULL DEFAULT 1 +); + +CREATE TABLE IF NOT EXISTS bot_state ( + key TEXT PRIMARY KEY, + value TEXT NOT NULL +); +""" + + +class Database: + def __init__(self, db_path: str | Path) -> None: + self.db_path = Path(db_path) + self.conn = sqlite3.connect(self.db_path) + self.conn.execute("PRAGMA journal_mode=WAL") + self.conn.executescript(_SCHEMA) + + def insert_candles(self, rows: list[dict]) -> int: + """Insert candles, skipping duplicates. Returns count inserted.""" + if not rows: + return 0 + cur = self.conn.executemany( + """INSERT OR IGNORE INTO candles + (symbol, timeframe, open_time, open, high, low, + close, volume, num_trades) + VALUES (:symbol, :timeframe, :open_time, :open, + :high, :low, :close, :volume, :num_trades)""", + rows, + ) + self.conn.commit() + return cur.rowcount + + def insert_funding_rates(self, rows: list[dict]) -> int: + if not rows: + return 0 + cur = self.conn.executemany( + """INSERT OR IGNORE INTO funding_rates (symbol, time, rate, premium) + VALUES (:symbol, :time, :rate, :premium)""", + rows, + ) + self.conn.commit() + return cur.rowcount + + def get_candles( + self, + symbol: str, + timeframe: str, + start_time: int | None = None, + limit: int = 5000, + ) -> list[dict]: + """Fetch candles ordered by open_time ascending.""" + query = "SELECT * FROM candles WHERE symbol = ? AND timeframe = ?" + params: list = [symbol, timeframe] + if start_time is not None: + query += " AND open_time >= ?" + params.append(start_time) + query += " ORDER BY open_time ASC LIMIT ?" 
+ params.append(limit) + cur = self.conn.execute(query, params) + cols = [d[0] for d in cur.description] + return [dict(zip(cols, row)) for row in cur.fetchall()] + + def get_latest_candle_time(self, symbol: str, timeframe: str) -> int | None: + """Return the most recent open_time for incremental fetching.""" + cur = self.conn.execute( + "SELECT MAX(open_time) FROM candles WHERE symbol = ? AND timeframe = ?", + (symbol, timeframe), + ) + row = cur.fetchone() + return row[0] if row and row[0] is not None else None + + def insert_trade(self, trade: dict) -> int: + cur = self.conn.execute( + """INSERT INTO trades (symbol, side, entry_time, entry_price, size_usd, is_paper) + VALUES (:symbol, :side, :entry_time, :entry_price, :size_usd, :is_paper)""", + trade, + ) + self.conn.commit() + return cur.lastrowid + + def close_trade( + self, trade_id: int, exit_time: int, + exit_price: float, pnl: float, reason: str, + ) -> None: + self.conn.execute( + """UPDATE trades SET exit_time=?, exit_price=?, pnl=?, exit_reason=? WHERE id=?""", + (exit_time, exit_price, pnl, reason, trade_id), + ) + self.conn.commit() + + def get_open_trades(self, symbol: str | None = None) -> list[dict]: + query = "SELECT * FROM trades WHERE exit_time IS NULL" + params: list = [] + if symbol: + query += " AND symbol = ?" 
+ params.append(symbol) + cur = self.conn.execute(query, params) + cols = [d[0] for d in cur.description] + return [dict(zip(cols, row)) for row in cur.fetchall()] + + def insert_prediction_snapshots(self, rows: list[dict]) -> int: + """Insert prediction snapshots, skipping duplicates.""" + if not rows: + return 0 + cur = self.conn.executemany( + """INSERT OR IGNORE INTO prediction_snapshots + (source, market_id, market_slug, category, + timestamp, probability, volume_24h) + VALUES (:source, :market_id, :market_slug, + :category, :timestamp, :probability, + :volume_24h)""", + rows, + ) + self.conn.commit() + return cur.rowcount + + def get_latest_predictions(self, source: str | None = None) -> list[dict]: + """Get the most recent snapshot per market_slug.""" + query = """ + SELECT ps.* FROM prediction_snapshots ps + INNER JOIN ( + SELECT market_slug, MAX(timestamp) AS max_ts + FROM prediction_snapshots + {where} + GROUP BY market_slug + ) latest ON ps.market_slug = latest.market_slug AND ps.timestamp = latest.max_ts + """ + params: list = [] + if source: + query = query.format(where="WHERE source = ?") + params.append(source) + else: + query = query.format(where="") + cur = self.conn.execute(query, params) + cols = [d[0] for d in cur.description] + return [dict(zip(cols, row)) for row in cur.fetchall()] + + def get_prediction_history( + self, market_slug: str, start_time: int | None = None, limit: int = 1000 + ) -> list[dict]: + """Get historical snapshots for a market, for backtesting.""" + query = "SELECT * FROM prediction_snapshots WHERE market_slug = ?" + params: list = [market_slug] + if start_time is not None: + query += " AND timestamp >= ?" + params.append(start_time) + query += " ORDER BY timestamp ASC LIMIT ?" 
+ params.append(limit) + cur = self.conn.execute(query, params) + cols = [d[0] for d in cur.description] + return [dict(zip(cols, row)) for row in cur.fetchall()] + + def get_funding_rates( + self, symbol: str, start_time: int, end_time: int + ) -> list[dict]: + """Fetch funding rates for a symbol within a time range.""" + cur = self.conn.execute( + "SELECT * FROM funding_rates WHERE symbol = ?" + " AND time >= ? AND time <= ? ORDER BY time ASC", + (symbol, start_time, end_time), + ) + cols = [d[0] for d in cur.description] + return [dict(zip(cols, row)) for row in cur.fetchall()] + + def get_state(self, key: str) -> str | None: + """Get a value from the bot_state table.""" + cur = self.conn.execute( + "SELECT value FROM bot_state WHERE key = ?", (key,) + ) + row = cur.fetchone() + return row[0] if row else None + + def set_state(self, key: str, value: str) -> None: + """Set a value in the bot_state table (upsert).""" + self.conn.execute( + "INSERT OR REPLACE INTO bot_state (key, value) VALUES (?, ?)", + (key, value), + ) + self.conn.commit() + + def get_closed_trades_in_range( + self, start_ms: int, end_ms: int + ) -> list[dict]: + """Fetch trades closed within a time range.""" + cur = self.conn.execute( + "SELECT * FROM trades WHERE exit_time IS NOT NULL" + " AND exit_time >= ? AND exit_time <= ? 
ORDER BY exit_time ASC", + (start_ms, end_ms), + ) + cols = [d[0] for d in cur.description] + return [dict(zip(cols, row)) for row in cur.fetchall()] + + def get_daily_pnl(self, day_start_ms: int) -> float: + """Sum of realised P&L for trades closed since day_start_ms.""" + cur = self.conn.execute( + "SELECT COALESCE(SUM(pnl), 0) FROM trades WHERE exit_time >= ?", + (day_start_ms,), + ) + return cur.fetchone()[0] + + def close(self) -> None: + self.conn.close() diff --git a/src/perp_bot/data/ingest.py b/src/perp_bot/data/ingest.py new file mode 100644 index 0000000..ddd2be8 --- /dev/null +++ b/src/perp_bot/data/ingest.py @@ -0,0 +1,104 @@ +"""Orchestrates data fetching and storage — initial backfill + incremental updates.""" + +from __future__ import annotations + +import logging +import time + +from perp_bot.config import BotConfig, PredictionConfig +from perp_bot.data.client import HyperliquidClient +from perp_bot.data.db import Database + +logger = logging.getLogger(__name__) + + +class DataIngestor: + def __init__(self, config: BotConfig, db: Database, client: HyperliquidClient) -> None: + self.config = config + self.db = db + self.client = client + + def backfill_candles(self, symbol: str) -> dict[str, int]: + """Backfill historical candles for all configured timeframes. + + Returns dict of timeframe -> number of candles inserted. 
+ """ + history_ms = self.config.data.history_days * 86_400_000 + default_start = int(time.time() * 1000) - history_ms + results = {} + + for tf in self.config.data.timeframes: + # Resume from where we left off, or start from history_days ago + latest = self.db.get_latest_candle_time(symbol, tf) + start = (latest + 1) if latest else default_start + + logger.info("Backfilling %s %s from %d", symbol, tf, start) + candles = self.client.fetch_candles(symbol, tf, start) + inserted = self.db.insert_candles(candles) + results[tf] = inserted + logger.info("Inserted %d candles for %s %s", inserted, symbol, tf) + + return results + + def backfill_funding(self, symbol: str) -> int: + """Backfill funding rate history.""" + history_ms = self.config.data.history_days * 86_400_000 + start = int(time.time() * 1000) - history_ms + + logger.info("Backfilling funding rates for %s", symbol) + rates = self.client.fetch_funding_history(symbol, start) + inserted = self.db.insert_funding_rates(rates) + logger.info("Inserted %d funding rates for %s", inserted, symbol) + return inserted + + def update_candles(self, symbol: str) -> dict[str, int]: + """Incremental update — fetch only new candles since last stored.""" + results = {} + for tf in self.config.data.timeframes: + latest = self.db.get_latest_candle_time(symbol, tf) + if latest is None: + logger.warning("No existing data for %s %s, run backfill first", symbol, tf) + continue + + candles = self.client.fetch_candles(symbol, tf, latest + 1) + inserted = self.db.insert_candles(candles) + results[tf] = inserted + if inserted: + logger.info("Updated %d candles for %s %s", inserted, symbol, tf) + return results + + def update_predictions(self, clients: dict, pred_config: PredictionConfig) -> int: + """Fetch latest prediction market data and insert into DB. + + Groups configured markets by source and fetches in batch per source. + Returns total number of snapshots inserted. 
+ """ + markets_by_source: dict[str, list[dict]] = {} + for m in pred_config.markets: + markets_by_source.setdefault(m.source, []).append({ + "market_id": m.market_id, + "slug": m.slug, + "category": m.category, + }) + + total = 0 + for source, market_list in markets_by_source.items(): + client = clients.get(source) + if client is None: + logger.warning("No client for source %s, skipping", source) + continue + snapshots = client.fetch_markets(market_list) + inserted = self.db.insert_prediction_snapshots(snapshots) + total += inserted + if snapshots: + logger.info( + "Fetched %d snapshots from %s, inserted %d", + len(snapshots), source, inserted, + ) + return total + + def run_full_backfill(self) -> None: + """Backfill all configured symbols.""" + for symbol in self.config.trading.symbols: + self.backfill_candles(symbol) + self.backfill_funding(symbol) diff --git a/src/perp_bot/data/prediction_client.py b/src/perp_bot/data/prediction_client.py new file mode 100644 index 0000000..4385e32 --- /dev/null +++ b/src/perp_bot/data/prediction_client.py @@ -0,0 +1,104 @@ +"""Prediction market API clients — Polymarket + Kalshi.""" + +from __future__ import annotations + +import json +import logging +import time +import urllib.request + +logger = logging.getLogger(__name__) + +_POLYMARKET_BASE = "https://gamma-api.polymarket.com" +_KALSHI_BASE = "https://api.elections.kalshi.com/trade-api/v2" + + +class PolymarketClient: + """Fetch market probabilities from Polymarket's public API.""" + + def fetch_market(self, condition_id: str, slug: str, category: str) -> dict | None: + """Fetch a single market by condition ID. + + Returns normalised snapshot dict, or None on failure. 
+ """ + url = f"{_POLYMARKET_BASE}/markets/{condition_id}" + try: + req = urllib.request.Request(url, headers={"Accept": "application/json"}) + with urllib.request.urlopen(req, timeout=15) as resp: + data = json.loads(resp.read().decode()) + + # Polymarket returns outcomePrices as JSON string "[\"0.65\",\"0.35\"]" + # First price is YES probability + prices = json.loads(data.get("outcomePrices", "[\"0.5\",\"0.5\"]")) + probability = float(prices[0]) if prices else 0.5 + volume = float(data.get("volume24hr", 0) or 0) + + return { + "source": "polymarket", + "market_id": condition_id, + "market_slug": slug, + "category": category, + "timestamp": int(time.time() * 1000), + "probability": probability, + "volume_24h": volume, + } + except Exception: + logger.warning("Polymarket API error for %s", condition_id, exc_info=True) + return None + + def fetch_markets( + self, markets: list[dict], + ) -> list[dict]: + """Fetch multiple markets. Each dict needs condition_id, slug, category. + + Returns list of successful snapshots (failures are skipped with a warning). + """ + results = [] + for m in markets: + snapshot = self.fetch_market(m["market_id"], m["slug"], m["category"]) + if snapshot is not None: + results.append(snapshot) + return results + + +class KalshiClient: + """Fetch market probabilities from Kalshi's public API.""" + + def fetch_market(self, ticker: str, slug: str, category: str) -> dict | None: + """Fetch a single market by ticker. + + Returns normalised snapshot dict, or None on failure. 
+ """ + url = f"{_KALSHI_BASE}/markets/{ticker}" + try: + req = urllib.request.Request(url, headers={"Accept": "application/json"}) + with urllib.request.urlopen(req, timeout=15) as resp: + data = json.loads(resp.read().decode()) + + market = data.get("market", data) + # Kalshi prices are in cents (1-99), convert to 0-1 probability + yes_price = market.get("yes_ask", market.get("last_price", 50)) + probability = float(yes_price) / 100.0 if yes_price > 1 else float(yes_price) + volume = float(market.get("volume_24h", market.get("volume", 0)) or 0) + + return { + "source": "kalshi", + "market_id": ticker, + "market_slug": slug, + "category": category, + "timestamp": int(time.time() * 1000), + "probability": probability, + "volume_24h": volume, + } + except Exception: + logger.warning("Kalshi API error for %s", ticker, exc_info=True) + return None + + def fetch_markets(self, markets: list[dict]) -> list[dict]: + """Fetch multiple markets. Returns list of successful snapshots.""" + results = [] + for m in markets: + snapshot = self.fetch_market(m["market_id"], m["slug"], m["category"]) + if snapshot is not None: + results.append(snapshot) + return results diff --git a/src/perp_bot/data/ws_client.py b/src/perp_bot/data/ws_client.py new file mode 100644 index 0000000..5d8d198 --- /dev/null +++ b/src/perp_bot/data/ws_client.py @@ -0,0 +1,220 @@ +"""WebSocket client — real-time price, candle, and order update streaming.""" + +from __future__ import annotations + +import logging +import threading +import time +from typing import Callable + +from hyperliquid.info import Info +from hyperliquid.utils import constants + +logger = logging.getLogger(__name__) + + +class WsClient: + """Wraps Hyperliquid WebSocket subscriptions with thread-safe price cache. 
+ + Usage: + ws = WsClient() + ws.subscribe_mid_prices(["ETH", "BTC"]) + ws.subscribe_candles("ETH", "15m", on_candle_callback) + + # Later, from any thread: + price = ws.get_mid_price("ETH") # returns cached value instantly + """ + + def __init__(self, base_url: str | None = None) -> None: + self._base_url = base_url or constants.MAINNET_API_URL + self.info = Info(self._base_url, skip_ws=False) + self._mid_prices: dict[str, float] = {} + self._lock = threading.Lock() + self._sub_ids: list[tuple[dict, int]] = [] + self._last_mid_update: float = 0.0 + self._stale_threshold_seconds: float = 30.0 + + # ── Mid price streaming ────────────────────────────── + + def subscribe_mid_prices(self, symbols: list[str] | None = None) -> None: + """Subscribe to allMids channel for real-time mid prices. + + If symbols is provided, only those are cached (saves memory). + Otherwise all mids are cached. + """ + self._watched_symbols = set(symbols) if symbols else None + + def _on_all_mids(msg: dict) -> None: + mids = msg.get("data", {}).get("mids", {}) + with self._lock: + if self._watched_symbols: + for sym in self._watched_symbols: + if sym in mids: + self._mid_prices[sym] = float(mids[sym]) + else: + for sym, px in mids.items(): + self._mid_prices[sym] = float(px) + self._last_mid_update = time.time() + + sub_id = self.info.subscribe({"type": "allMids"}, _on_all_mids) + self._sub_ids.append(({"type": "allMids"}, sub_id)) + logger.info("Subscribed to allMids WebSocket feed") + + def get_mid_price(self, symbol: str) -> float | None: + """Get the latest cached mid price. Returns None if not yet received.""" + with self._lock: + return self._mid_prices.get(symbol) + + # ── Candle streaming ───────────────────────────────── + + def subscribe_candles( + self, + symbol: str, + interval: str, + callback: Callable[[dict], None], + ) -> int: + """Subscribe to candle updates for a symbol/interval. 
+ + The callback receives a normalised dict: + {symbol, timeframe, open_time, open, high, low, close, volume, num_trades} + """ + def _on_candle(msg: dict) -> None: + d = msg.get("data", {}) + try: + normalised = { + "symbol": d["s"], + "timeframe": d["i"], + "open_time": d["t"], + "open": float(d["o"]), + "high": float(d["h"]), + "low": float(d["l"]), + "close": float(d["c"]), + "volume": float(d["v"]), + "num_trades": int(d["n"]), + } + callback(normalised) + except (KeyError, ValueError): + logger.warning("Malformed candle message: %s", d) + + sub = {"type": "candle", "coin": symbol, "interval": interval} + sub_id = self.info.subscribe(sub, _on_candle) + self._sub_ids.append((sub, sub_id)) + logger.info("Subscribed to %s %s candle feed", symbol, interval) + return sub_id + + # ── L2 book streaming ──────────────────────────────── + + def subscribe_bbo( + self, + symbol: str, + callback: Callable[[float, float], None], + ) -> int: + """Subscribe to best bid/offer for a symbol. + + Callback receives (best_bid, best_ask). + """ + def _on_bbo(msg: dict) -> None: + data = msg.get("data", {}) + bbo = data.get("bbo", [None, None]) + try: + bid = float(bbo[0]["px"]) if bbo[0] else None + ask = float(bbo[1]["px"]) if bbo[1] else None + if bid is not None and ask is not None: + callback(bid, ask) + except (KeyError, TypeError, IndexError): + pass + + sub = {"type": "bbo", "coin": symbol} + sub_id = self.info.subscribe(sub, _on_bbo) + self._sub_ids.append((sub, sub_id)) + logger.info("Subscribed to %s BBO feed", symbol) + return sub_id + + # ── Order update streaming ─────────────────────────── + + def subscribe_order_updates( + self, + user_address: str, + callback: Callable[[dict], None], + ) -> int: + """Subscribe to order status updates for a user. + + Only one subscriber per Info instance is allowed. 
+ """ + sub = {"type": "orderUpdates", "user": user_address} + sub_id = self.info.subscribe(sub, callback) + self._sub_ids.append((sub, sub_id)) + logger.info("Subscribed to order updates for %s", user_address[:10]) + return sub_id + + # ── User fill streaming ────────────────────────────── + + def subscribe_user_fills( + self, + user_address: str, + callback: Callable[[list[dict]], None], + ) -> int: + """Subscribe to user fill events.""" + def _on_fills(msg: dict) -> None: + data = msg.get("data", {}) + fills = data.get("fills", []) + if fills: + callback(fills) + + sub = {"type": "userFills", "user": user_address} + sub_id = self.info.subscribe(sub, _on_fills) + self._sub_ids.append((sub, sub_id)) + logger.info("Subscribed to user fills for %s", user_address[:10]) + return sub_id + + # ── Health check ────────────────────────────────────── + + def is_healthy(self) -> bool: + """Check if the WebSocket is receiving data. + + Returns False if no mid price update has been received within + the stale threshold, indicating the connection may be dead. 
    def reconnect(self) -> None:
        """Tear down and recreate the WebSocket connection, re-subscribing.

        Only the allMids subscription is restored automatically below;
        candle/BBO/order/fill subscriptions registered via the other
        ``subscribe_*`` methods are dropped. NOTE(review): confirm callers
        re-establish those subscriptions after a reconnect.
        """
        logger.warning("Reconnecting WebSocket — stale data detected")
        # Snapshot and clear the subscription ledger first, so the
        # re-subscription below starts from an empty list.
        old_subs = list(self._sub_ids)
        self._sub_ids.clear()

        # Best-effort unsubscribe — the old socket may already be dead.
        for sub, sub_id in old_subs:
            try:
                self.info.unsubscribe(sub, sub_id)
            except Exception:
                pass

        # Recreate Info with fresh WebSocket
        self.info = Info(self._base_url, skip_ws=False)
        with self._lock:
            # Reset the staleness clock: is_healthy() treats 0.0 as
            # "just started" and returns True until data arrives.
            self._last_mid_update = 0.0

        # Re-subscribe mid prices if they were active
        watched = getattr(self, "_watched_symbols", None)
        if watched is not None or any(
            s.get("type") == "allMids" for s, _ in old_subs
        ):
            symbols = list(watched) if watched else None
            self.subscribe_mid_prices(symbols)

        logger.info("WebSocket reconnected and re-subscribed")
class PaperExecutor(Executor):
    """Simulated execution: trades are recorded in the DB, nothing is sent
    to the exchange. Used for paper-trading mode."""

    def __init__(self, db: Database) -> None:
        self.db = db

    def open_position(self, symbol: str, side: str, size_usd: float, price: float) -> int | None:
        """Record a simulated entry at *price*; returns the DB trade id."""
        entry_ms = int(time.time() * 1000)
        record = {
            "symbol": symbol,
            "side": side,
            "entry_time": entry_ms,
            "entry_price": price,
            "size_usd": size_usd,
            "is_paper": 1,
        }
        trade_id = self.db.insert_trade(record)
        logger.info(
            "[PAPER] Opened %s %s @ %.2f, size $%.2f (trade #%d)",
            side, symbol, price, size_usd, trade_id,
        )
        return trade_id

    def close_position(
        self, trade_id: int, symbol: str,
        price: float, pnl: float, reason: str,
    ) -> bool:
        """Mark the simulated trade closed in the DB; always succeeds."""
        exit_ms = int(time.time() * 1000)
        self.db.close_trade(trade_id, exit_ms, price, pnl, reason)
        logger.info(
            "[PAPER] Closed trade #%d %s @ %.2f, pnl=%.2f, reason=%s",
            trade_id, symbol, price, pnl, reason,
        )
        return True
rejection (Hyperliquid requires specific tick sizes) +_SIGNIFICANT_FIGURES = 5 + + +def _round_price(price: float) -> float: + """Round price to 5 significant figures for Hyperliquid compatibility.""" + if price == 0: + return 0.0 + return float(f"{price:.{_SIGNIFICANT_FIGURES}g}") + + +def _round_size(size: float, sz_decimals: int) -> float: + """Round size to the asset's allowed decimal places.""" + return round(size, sz_decimals) + + +class LiveExecutor(Executor): + """Places real orders on Hyperliquid with limit-first, taker-fallback strategy. + + Features: + - GTC limit entry with taker fallback after timeout + - Server-side stop-loss attached atomically to entry orders + - Position tracking via DB for crash recovery + - Slippage-bounded market orders (IOC with price limit) + """ + + def __init__( + self, + config: BotConfig, + db: Database, + base_url: str | None = None, + ) -> None: + self.config = config + self.db = db + self._base_url = base_url or constants.MAINNET_API_URL + + # Initialise exchange connection + wallet = eth_account.Account.from_key(config.hl_private_key) + self._exchange = Exchange(wallet, base_url=self._base_url) + self._info: Info = self._exchange.info + self._wallet_address = ( + config.hl_wallet_address or wallet.address + ) + + # Cache asset metadata for size rounding + self._sz_decimals: dict[str, int] = {} + self._sl_failed: bool = False # Set True when SL placement fails + self._slippage_history: list[float] = [] # Track fill vs expected + self._load_asset_meta() + + logger.info( + "LiveExecutor initialised — wallet=%s, url=%s", + self._wallet_address[:10], self._base_url, + ) + + def _load_asset_meta(self) -> None: + """Load size decimal precision for each asset.""" + try: + meta = self._info.meta_and_asset_ctxs() + if meta and len(meta) >= 1: + for asset in meta[0].get("universe", []): + name = asset["name"] + self._sz_decimals[name] = asset.get("szDecimals", 3) + except Exception: + logger.warning("Failed to load asset 
metadata, using default precision") + + def _get_sz_decimals(self, symbol: str) -> int: + return self._sz_decimals.get(symbol, 3) + + def _get_mid_price(self, symbol: str) -> float: + """Fetch current mid price via REST.""" + l2 = self._info.l2_snapshot(symbol) + best_bid = float(l2["levels"][0][0]["px"]) + best_ask = float(l2["levels"][1][0]["px"]) + return (best_bid + best_ask) / 2 + + # ── Entry ──────────────────────────────────────────── + + def open_position( + self, + symbol: str, + side: str, + size_usd: float, + price: float, + ) -> int | None: + """Open a position with limit-first, taker-fallback strategy. + + Also places a server-side stop-loss for redundancy. + Returns trade_id from DB, or None on failure. + Sets self._sl_failed = True if the position has no server-side SL. + """ + self._sl_failed = False + is_buy = side == "long" + sz_decimals = self._get_sz_decimals(symbol) + size_base = _round_size(size_usd / price, sz_decimals) + + if size_base <= 0: + logger.error("Computed size_base <= 0 for %s", symbol) + return None + + # Compute stop-loss price + sl_pct = self.config.risk.max_loss_per_trade_pct + if is_buy: + sl_price = _round_price(price * (1 - sl_pct)) + else: + sl_price = _round_price(price * (1 + sl_pct)) + + limit_price = _round_price(price) + + # Try limit order with server-side SL attached + fill_price = self._try_limit_with_sl( + symbol, is_buy, size_base, limit_price, sl_price, + ) + + if fill_price is None: + # Fallback: taker (IOC) order + logger.info("Limit unfilled after timeout, falling back to taker") + fill_price = self._try_taker(symbol, is_buy, size_base, price) + + if fill_price is None: + logger.error("Taker order also failed for %s %s", side, symbol) + return None + + # Place server-side SL separately since the bulk entry failed + sl_ok = self._place_server_side_sl( + symbol, not is_buy, size_base, sl_price, + ) + if not sl_ok: + self._sl_failed = True + logger.critical( + "UNHEDGED POSITION: %s %s has no server-side SL!", 
+ side, symbol, + ) + + # Track slippage: actual fill vs requested price + self._record_slippage(price, fill_price, is_buy) + + # Record in DB + now = int(time.time() * 1000) + trade_id = self.db.insert_trade({ + "symbol": symbol, + "side": side, + "entry_time": now, + "entry_price": fill_price, + "size_usd": fill_price * size_base, + "is_paper": 0, + }) + logger.info( + "[LIVE] Opened %s %s @ %.2f, size=%.4f %s ($%.2f) (trade #%d)", + side, symbol, fill_price, size_base, symbol, + fill_price * size_base, trade_id, + ) + return trade_id + + def _try_limit_with_sl( + self, + symbol: str, + is_buy: bool, + size_base: float, + limit_price: float, + sl_price: float, + ) -> float | None: + """Place GTC limit entry + server-side SL atomically. + + Waits up to taker_fallback_seconds for fill. + Returns fill price, or None if unfilled (cancels the order). + """ + orders = [ + { + "coin": symbol, + "is_buy": is_buy, + "sz": size_base, + "limit_px": limit_price, + "order_type": {"limit": {"tif": "Gtc"}}, + "reduce_only": False, + }, + { + "coin": symbol, + "is_buy": not is_buy, + "sz": size_base, + "limit_px": sl_price, + "order_type": { + "trigger": { + "triggerPx": sl_price, + "isMarket": True, + "tpsl": "sl", + } + }, + "reduce_only": True, + }, + ] + + try: + result = self._exchange.bulk_orders(orders, grouping="normalTpsl") + logger.debug("bulk_orders result: %s", result) + except Exception: + logger.exception("bulk_orders failed") + return None + + # Check if entry order was immediately filled + immediate_fill = self._extract_fill_price(result) + if immediate_fill is not None: + logger.info("Entry order filled immediately at %.2f", immediate_fill) + return immediate_fill + + # Order is resting — extract OID and wait for fill + entry_oid = self._extract_oid(result, index=0) + if entry_oid is None: + logger.warning("Could not extract entry OID from result") + return None + + # Wait for fill + timeout = self.config.execution.taker_fallback_seconds + fill_price = 
self._wait_for_fill(symbol, entry_oid, timeout) + + if fill_price is None: + # Cancel the unfilled limit order (SL may remain — that's fine) + try: + self._exchange.cancel(symbol, entry_oid) + logger.info("Cancelled unfilled limit order %d", entry_oid) + except Exception: + logger.warning("Failed to cancel order %d", entry_oid) + # Also cancel the SL since the entry didn't fill + self._cancel_open_trigger_orders(symbol) + + return fill_price + + def _try_taker( + self, + symbol: str, + is_buy: bool, + size_base: float, + reference_price: float, + ) -> float | None: + """Place an IOC (taker) order with bounded slippage. + + Returns fill price, or None on failure. + """ + # Allow 0.5% slippage from reference price + slippage = 0.005 + if is_buy: + limit_px = _round_price(reference_price * (1 + slippage)) + else: + limit_px = _round_price(reference_price * (1 - slippage)) + + try: + result = self._exchange.order( + symbol, + is_buy=is_buy, + sz=size_base, + limit_px=limit_px, + order_type={"limit": {"tif": "Ioc"}}, + ) + logger.debug("IOC order result: %s", result) + except Exception: + logger.exception("IOC order failed") + return None + + return self._extract_fill_price(result) + + def _place_server_side_sl( + self, + symbol: str, + is_buy: bool, + size_base: float, + sl_price: float, + ) -> bool: + """Place a standalone server-side stop-loss trigger order. + + Retries once on failure. Returns True on success. 
+ """ + for attempt in range(2): + try: + self._exchange.order( + symbol, + is_buy=is_buy, + sz=size_base, + limit_px=sl_price, + order_type={ + "trigger": { + "triggerPx": sl_price, + "isMarket": True, + "tpsl": "sl", + } + }, + reduce_only=True, + ) + logger.info("Placed server-side SL at %.2f for %s", sl_price, symbol) + return True + except Exception: + if attempt == 0: + logger.warning("SL placement failed, retrying once...") + time.sleep(1) + else: + logger.exception("CRITICAL: Server-side SL placement failed after retry") + return False + + # ── Exit ───────────────────────────────────────────── + + def close_position( + self, + trade_id: int, + symbol: str, + price: float, + pnl: float, + reason: str, + ) -> bool: + """Close a position using market_close, then update DB. + + Returns True on success. + """ + # Cancel any remaining trigger orders (server-side SL) + self._cancel_open_trigger_orders(symbol) + + # Use market close for immediate execution + try: + result = self._exchange.market_close( + symbol, slippage=0.01, # 1% slippage tolerance + ) + logger.debug("market_close result: %s", result) + except Exception: + logger.exception("market_close failed for %s", symbol) + return False + + fill_price = self._extract_fill_price(result) + if fill_price is None: + # Fallback: use the provided price estimate + fill_price = price + logger.warning( + "Could not extract fill price from close, using estimate %.2f", + price, + ) + + # Recalculate actual PnL from fill + trade = self._get_trade(trade_id) + if trade: + actual_pnl = self._calc_pnl(trade, fill_price) + else: + actual_pnl = pnl + + now = int(time.time() * 1000) + self.db.close_trade(trade_id, now, fill_price, actual_pnl, reason) + logger.info( + "[LIVE] Closed trade #%d %s @ %.2f, pnl=%.2f, reason=%s", + trade_id, symbol, fill_price, actual_pnl, reason, + ) + return True + + # ── Helpers ────────────────────────────────────────── + + def _wait_for_fill( + self, symbol: str, oid: int, timeout_seconds: int 
+ ) -> float | None: + """Poll order status until filled or timeout. Returns fill price.""" + deadline = time.time() + timeout_seconds + poll_interval = 2.0 # seconds + + while time.time() < deadline: + try: + status = self._info.query_order_by_oid( + self._wallet_address, oid + ) + order = status.get("order", {}) + order_status = order.get("status", "") + + if order_status == "filled": + return self._get_fill_price(symbol, oid) + + if order_status in ("canceled", "rejected"): + logger.warning("Order %d was %s", oid, order_status) + return None + + except Exception: + logger.debug("Error polling order %d status", oid) + + time.sleep(poll_interval) + + return None + + def _extract_oid(self, result: Any, index: int = 0) -> int | None: + """Extract order ID from API response.""" + try: + statuses = result.get("response", {}).get("data", {}).get( + "statuses", [] + ) + if index < len(statuses): + status = statuses[index] + if "resting" in status: + return status["resting"]["oid"] + if "filled" in status: + return status["filled"]["oid"] + except (AttributeError, KeyError, TypeError): + pass + return None + + def _extract_fill_price(self, result: Any) -> float | None: + """Extract fill price from an order result.""" + try: + statuses = result.get("response", {}).get("data", {}).get( + "statuses", [] + ) + if statuses: + status = statuses[0] + if "filled" in status: + return float(status["filled"]["avgPx"]) + except (AttributeError, KeyError, TypeError, ValueError): + pass + return None + + def _get_fill_price(self, symbol: str, oid: int) -> float | None: + """Get the actual average fill price for an order using user_fills_by_time. + + This queries the fills API rather than reading limitPx, which would + give the requested price instead of the actual execution price. 
+ """ + try: + # Query recent fills (last 60 seconds should be sufficient) + start_time = int((time.time() - 60) * 1000) + fills = self._info.user_fills_by_time( + self._wallet_address, start_time + ) + # Find fills matching this order + order_fills = [ + f for f in fills + if f.get("oid") == oid and f.get("coin") == symbol + ] + if order_fills: + # Compute volume-weighted average price + total_sz = sum(float(f["sz"]) for f in order_fills) + if total_sz > 0: + vwap = sum( + float(f["px"]) * float(f["sz"]) + for f in order_fills + ) / total_sz + return vwap + except Exception: + logger.warning("Failed to get fill price via user_fills_by_time") + return None + + def _cancel_open_trigger_orders(self, symbol: str) -> None: + """Cancel all open trigger orders (server-side SL/TP) for a symbol.""" + try: + open_orders = self._info.frontend_open_orders(self._wallet_address) + for order in open_orders: + if ( + order.get("coin") == symbol + and order.get("orderType") == "trigger" + ): + self._exchange.cancel(symbol, order["oid"]) + logger.debug("Cancelled trigger order %d", order["oid"]) + except Exception: + logger.warning("Failed to cancel trigger orders for %s", symbol) + + def _get_trade(self, trade_id: int) -> dict | None: + """Fetch a trade record from DB.""" + cur = self.db.conn.execute( + "SELECT * FROM trades WHERE id = ?", (trade_id,) + ) + cols = [d[0] for d in cur.description] + row = cur.fetchone() + return dict(zip(cols, row)) if row else None + + @staticmethod + def _calc_pnl(trade: dict, exit_price: float) -> float: + if trade["side"] == "long": + return ( + (exit_price - trade["entry_price"]) + / trade["entry_price"] + * trade["size_usd"] + ) + return ( + (trade["entry_price"] - exit_price) + / trade["entry_price"] + * trade["size_usd"] + ) + + # ── Slippage monitoring ──────────────────────────────── + + def _record_slippage( + self, expected: float, actual: float, is_buy: bool, + ) -> None: + """Record slippage for a fill and warn if excessive.""" + if 
is_buy: + slippage_pct = (actual - expected) / expected * 100 + else: + slippage_pct = (expected - actual) / expected * 100 + self._slippage_history.append(slippage_pct) + logger.info( + "Slippage: expected=%.2f actual=%.2f slip=%.4f%%", + expected, actual, slippage_pct, + ) + if abs(slippage_pct) > 0.1: + logger.warning( + "High slippage detected: %.4f%%", slippage_pct, + ) + + def get_slippage_stats(self) -> dict: + """Return slippage statistics for monitoring.""" + if not self._slippage_history: + return {"count": 0, "avg_pct": 0.0, "max_pct": 0.0} + return { + "count": len(self._slippage_history), + "avg_pct": sum(self._slippage_history) + / len(self._slippage_history), + "max_pct": max( + abs(s) for s in self._slippage_history + ), + } + + # ── Position queries ───────────────────────────────── + + def get_exchange_position(self, symbol: str) -> dict | None: + """Query the exchange for current position state (crash recovery).""" + try: + state = self._info.user_state(self._wallet_address) + for pos in state.get("assetPositions", []): + position = pos.get("position", {}) + if position.get("coin") == symbol: + szi = float(position.get("szi", "0")) + if szi != 0: + return { + "symbol": symbol, + "side": "long" if szi > 0 else "short", + "size_base": abs(szi), + "entry_price": float( + position.get("entryPx", "0") + ), + "unrealized_pnl": float( + position.get("unrealizedPnl", "0") + ), + } + except Exception: + logger.exception("Failed to query exchange position for %s", symbol) + return None + + def set_leverage(self, symbol: str, leverage: int) -> bool: + """Set leverage for a symbol on the exchange. 
logger = logging.getLogger(__name__)


def send_discord_alert(webhook_url: str, message: str) -> bool:
    """Post *message* to a Discord channel through an incoming webhook.

    Returns True when Discord acknowledges the post (HTTP 204 No Content),
    False when no webhook is configured or the request fails for any reason.
    """
    if not webhook_url:
        logger.debug("No Discord webhook configured, skipping alert")
        return False

    body = json.dumps({"content": message}).encode()
    request = urllib.request.Request(
        webhook_url,
        data=body,
        headers={"Content-Type": "application/json"},
    )
    try:
        with urllib.request.urlopen(request, timeout=10) as response:
            # Discord webhooks answer 204 on a successful post.
            return response.status == 204
    except Exception:
        # Best-effort alerting: never let a failed alert take the bot down.
        logger.exception("Failed to send Discord alert")
        return False
+ + urllib.parse.urlencode({"chat_id": chat_id, "text": message}) + ) + req = urllib.request.Request(url) + try: + with urllib.request.urlopen(req, timeout=10) as resp: + return resp.status == 200 + except Exception: + logger.exception("Failed to send Telegram alert") + return False diff --git a/src/perp_bot/infra/health.py b/src/perp_bot/infra/health.py new file mode 100644 index 0000000..bda2568 --- /dev/null +++ b/src/perp_bot/infra/health.py @@ -0,0 +1,79 @@ +"""Periodic health check — heartbeat with system status.""" + +from __future__ import annotations + +import logging +import time + +from perp_bot.data.db import Database +from perp_bot.data.ws_client import WsClient +from perp_bot.infra.alerts import send_discord_alert, send_telegram_alert + +logger = logging.getLogger(__name__) + + +class HealthChecker: + """Sends periodic heartbeat messages with bot status.""" + + def __init__( + self, + config, + db: Database, + ws_client: WsClient | None = None, + executor=None, + interval_minutes: int = 30, + ) -> None: + self._config = config + self._db = db + self._ws_client = ws_client + self._executor = executor + self._interval_ms = interval_minutes * 60 * 1000 + self._last_heartbeat_ms: int = 0 + + def tick(self, current_regime: str = "NORMAL") -> None: + """Call on each main loop iteration. 
Sends heartbeat if interval has elapsed.""" + now_ms = int(time.time() * 1000) + if now_ms - self._last_heartbeat_ms < self._interval_ms: + return + + self._last_heartbeat_ms = now_ms + status = self._build_status(current_regime) + + logger.info("HEARTBEAT: %s", status) + if self._config.discord_webhook_url: + send_discord_alert(self._config.discord_webhook_url, status) + if self._config.telegram_bot_token and self._config.telegram_chat_id: + send_telegram_alert( + self._config.telegram_bot_token, + self._config.telegram_chat_id, + status, + ) + + def _build_status(self, regime: str) -> str: + open_trades = self._db.get_open_trades() + ws_ok = self._ws_client.is_healthy() if self._ws_client else "N/A" + + parts = [ + f"mode={self._config.mode}", + f"regime={regime}", + f"open_positions={len(open_trades)}", + f"ws_healthy={ws_ok}", + ] + + if open_trades: + t = open_trades[0] + parts.append( + f"pos={t['side']} {t['symbol']}" + f" @ {t['entry_price']:.2f}" + ) + + # Slippage stats from live executor + if self._executor and hasattr(self._executor, "get_slippage_stats"): + stats = self._executor.get_slippage_stats() + if stats["count"] > 0: + parts.append( + f"slip_avg={stats['avg_pct']:.4f}%" + f" max={stats['max_pct']:.4f}%" + ) + + return " | ".join(parts) diff --git a/src/perp_bot/infra/logging.py b/src/perp_bot/infra/logging.py new file mode 100644 index 0000000..7b2b28c --- /dev/null +++ b/src/perp_bot/infra/logging.py @@ -0,0 +1,41 @@ +"""JSON-structured logging setup.""" + +from __future__ import annotations + +import json +import logging +import sys +from datetime import datetime, timezone + + +class JsonFormatter(logging.Formatter): + def format(self, record: logging.LogRecord) -> str: + log_entry = { + "ts": datetime.fromtimestamp(record.created, tz=timezone.utc).isoformat(), + "level": record.levelname, + "logger": record.name, + "msg": record.getMessage(), + } + if record.exc_info and record.exc_info[1]: + log_entry["exception"] = 
self.formatException(record.exc_info) + return json.dumps(log_entry) + + +def setup_logging(level: str = "INFO", log_file: str | None = None) -> None: + formatter = JsonFormatter() + root = logging.getLogger() + root.setLevel(getattr(logging, level.upper(), logging.INFO)) + root.handlers.clear() + + stdout_handler = logging.StreamHandler(sys.stdout) + stdout_handler.setFormatter(formatter) + root.addHandler(stdout_handler) + + if log_file: + from logging.handlers import RotatingFileHandler + + file_handler = RotatingFileHandler( + log_file, maxBytes=5 * 1024 * 1024, backupCount=3, + ) + file_handler.setFormatter(formatter) + root.addHandler(file_handler) diff --git a/src/perp_bot/ipc/__init__.py b/src/perp_bot/ipc/__init__.py new file mode 100644 index 0000000..fa9af31 --- /dev/null +++ b/src/perp_bot/ipc/__init__.py @@ -0,0 +1 @@ +"""IPC layer — Unix socket communication between daemon and TUI.""" diff --git a/src/perp_bot/ipc/client.py b/src/perp_bot/ipc/client.py new file mode 100644 index 0000000..321b236 --- /dev/null +++ b/src/perp_bot/ipc/client.py @@ -0,0 +1,80 @@ +"""Client for communicating with the daemon via Unix socket.""" + +from __future__ import annotations + +import json +import socket +from pathlib import Path + +from perp_bot.ipc.protocol import ( + CMD_EMERGENCY_CLOSE, + CMD_GET_STATE, + CMD_PAUSE, + CMD_RESUME, +) + +_RECV_BUF = 65536 +_TIMEOUT = 2.0 + + +class DaemonClient: + """Connects to the daemon's Unix socket for state queries and commands. + + All methods return None on connection failure for graceful degradation. 
+ """ + + def __init__(self, socket_path: Path) -> None: + self._socket_path = socket_path + + def is_running(self) -> bool: + """Check if the daemon is reachable.""" + if not self._socket_path.exists(): + return False + result = self._request({"cmd": CMD_GET_STATE}) + return result is not None and result.get("ok", False) + + def get_state(self) -> dict | None: + """Fetch full daemon state snapshot.""" + result = self._request({"cmd": CMD_GET_STATE}) + if result and result.get("ok"): + return result.get("state") + return None + + def pause(self) -> dict | None: + """Pause the trading loop.""" + return self._request({"cmd": CMD_PAUSE}) + + def resume(self) -> dict | None: + """Resume the trading loop.""" + return self._request({"cmd": CMD_RESUME}) + + def emergency_close(self, symbol: str) -> dict | None: + """Emergency-close all positions for a symbol.""" + return self._request({"cmd": CMD_EMERGENCY_CLOSE, "symbol": symbol}) + + def _request(self, msg: dict) -> dict | None: + """Send a JSON request and receive a JSON response. + + Returns None on any connection failure. 
+ """ + try: + sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM) + sock.settimeout(_TIMEOUT) + sock.connect(str(self._socket_path)) + sock.sendall((json.dumps(msg) + "\n").encode("utf-8")) + + # Receive response + chunks = [] + while True: + chunk = sock.recv(_RECV_BUF) + if not chunk: + break + chunks.append(chunk) + if b"\n" in chunk: + break + + sock.close() + data = b"".join(chunks).decode("utf-8").strip() + return json.loads(data) if data else None + except (OSError, json.JSONDecodeError, ConnectionRefusedError): + return None diff --git a/src/perp_bot/ipc/protocol.py b/src/perp_bot/ipc/protocol.py new file mode 100644 index 0000000..8bd4bef --- /dev/null +++ b/src/perp_bot/ipc/protocol.py @@ -0,0 +1,22 @@ +"""Shared IPC constants and helpers.""" + +from __future__ import annotations + +from pathlib import Path + +SOCKET_NAME = "perp-bot.sock" + +# Command constants +CMD_GET_STATE = "get_state" +CMD_PAUSE = "pause" +CMD_RESUME = "resume" +CMD_EMERGENCY_CLOSE = "emergency_close" + + +def get_socket_path(db_path: str | Path) -> Path: + """Derive the socket path from the database directory. + + The socket lives alongside the SQLite DB so that the TUI can + locate it from the same config. + """ + return Path(db_path).parent / SOCKET_NAME diff --git a/src/perp_bot/ipc/server.py b/src/perp_bot/ipc/server.py new file mode 100644 index 0000000..94ab71f --- /dev/null +++ b/src/perp_bot/ipc/server.py @@ -0,0 +1,177 @@ +"""Threaded Unix socket server for daemon state exposure and command dispatch.""" + +from __future__ import annotations + +import json +import logging +import socket +import threading +from pathlib import Path + +from perp_bot.ipc.protocol import ( + CMD_EMERGENCY_CLOSE, + CMD_GET_STATE, + CMD_PAUSE, + CMD_RESUME, +) +from perp_bot.ipc.state import DaemonState + +logger = logging.getLogger(__name__) + +_RECV_BUF = 4096 + + +class DaemonStateServer: + """Unix socket server that exposes daemon state and accepts commands. 
+ + Runs in a daemon thread — auto-dies with the main process. + Each connection is stateless: read one JSON line, dispatch, respond, close. + """ + + def __init__( + self, + socket_path: Path, + state: DaemonState, + executor=None, + db=None, + ) -> None: + self._socket_path = socket_path + self._state = state + self._executor = executor + self._db = db + self._server_sock: socket.socket | None = None + self._thread: threading.Thread | None = None + self._running = False + + def start(self) -> None: + """Start the server thread.""" + # Clean up stale socket file + if self._socket_path.exists(): + try: + self._socket_path.unlink() + except OSError: + logger.warning("Could not remove stale socket %s", self._socket_path) + + self._server_sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM) + self._server_sock.bind(str(self._socket_path)) + self._server_sock.listen(5) + self._server_sock.settimeout(1.0) # Allow periodic shutdown checks + self._running = True + + self._thread = threading.Thread( + target=self._serve_loop, name="ipc-server", daemon=True, + ) + self._thread.start() + logger.info("IPC server listening on %s", self._socket_path) + + def stop(self) -> None: + """Stop the server and clean up the socket file.""" + self._running = False + if self._server_sock: + try: + self._server_sock.close() + except OSError: + pass + if self._thread: + self._thread.join(timeout=3) + if self._socket_path.exists(): + try: + self._socket_path.unlink() + except OSError: + pass + logger.info("IPC server stopped") + + def _serve_loop(self) -> None: + """Accept connections in a loop until stopped.""" + while self._running: + try: + conn, _ = self._server_sock.accept() + except socket.timeout: + continue + except OSError: + if self._running: + logger.debug("Socket accept error") + break + + try: + self._handle_connection(conn) + except Exception: + logger.debug("Error handling IPC connection", exc_info=True) + finally: + conn.close() + + def _handle_connection(self, conn: 
socket.socket) -> None: + """Read one JSON command, dispatch, write JSON response, close.""" + conn.settimeout(5.0) + data = conn.recv(_RECV_BUF) + if not data: + return + + line = data.decode("utf-8").strip() + try: + request = json.loads(line) + except json.JSONDecodeError: + self._send_response(conn, {"error": "invalid_json"}) + return + + cmd = request.get("cmd", "") + response = self._dispatch(cmd, request) + self._send_response(conn, response) + + def _dispatch(self, cmd: str, request: dict) -> dict: + if cmd == CMD_GET_STATE: + return {"ok": True, "state": self._state.snapshot()} + + if cmd == CMD_PAUSE: + self._state.update(paused=True) + logger.info("Daemon PAUSED via IPC") + return {"ok": True, "paused": True} + + if cmd == CMD_RESUME: + self._state.update(paused=False) + logger.info("Daemon RESUMED via IPC") + return {"ok": True, "paused": False} + + if cmd == CMD_EMERGENCY_CLOSE: + return self._handle_emergency_close(request) + + return {"ok": False, "error": f"unknown_command: {cmd}"} + + def _handle_emergency_close(self, request: dict) -> dict: + symbol = request.get("symbol") + if not symbol: + return {"ok": False, "error": "symbol_required"} + + if not self._executor or not self._db: + return {"ok": False, "error": "executor_not_available"} + + open_trades = self._db.get_open_trades(symbol) + if not open_trades: + return {"ok": True, "message": f"no_open_trades_for_{symbol}"} + + closed = 0 + for trade in open_trades: + try: + self._executor.close_position( + trade["id"], symbol, trade["entry_price"], 0.0, + "emergency_close_ipc", + ) + closed += 1 + except Exception: + logger.exception( + "Failed to emergency-close trade #%d", trade["id"], + ) + + logger.warning( + "EMERGENCY CLOSE: closed %d/%d trades for %s", + closed, len(open_trades), symbol, + ) + return {"ok": True, "closed": closed, "total": len(open_trades)} + + @staticmethod + def _send_response(conn: socket.socket, response: dict) -> None: + try: + payload = json.dumps(response) + "\n" + 
@dataclass
class DaemonState:
    """Volatile runtime state exposed to TUI clients via the socket.

    Shared between the trading loop (writer) and the IPC server (reader);
    all access goes through the internal lock.
    """

    mode: str = "paper"
    paused: bool = False
    start_time: float = field(default_factory=time.time)
    tick_count: int = 0
    last_tick_ms: int = 0
    mid_prices: dict[str, float] = field(default_factory=dict)
    latest_signals: dict[str, dict] = field(default_factory=dict)
    ws_healthy: bool = False
    prediction_regime: str = "NORMAL"
    risk_allowed: bool = True
    risk_reason: str = "all_checks_passed"
    cooldown_remaining_s: float = 0.0
    daily_pnl: float = 0.0
    slippage_stats: dict = field(
        default_factory=lambda: {"count": 0, "avg_pct": 0.0, "max_pct": 0.0},
    )

    _lock: threading.Lock = field(
        default_factory=threading.Lock, repr=False,
    )

    def snapshot(self) -> dict:
        """Return a point-in-time dict copy of the state, safe to JSON-encode.

        Container fields are shallow-copied so the caller never aliases
        the live, mutating state.
        """
        with self._lock:
            snap = {
                "mode": self.mode,
                "paused": self.paused,
                "uptime_seconds": time.time() - self.start_time,
                "tick_count": self.tick_count,
                "last_tick_ms": self.last_tick_ms,
                "mid_prices": dict(self.mid_prices),
                "latest_signals": {
                    sym: dict(sig) for sym, sig in self.latest_signals.items()
                },
                "ws_healthy": self.ws_healthy,
                "prediction_regime": self.prediction_regime,
                "risk_allowed": self.risk_allowed,
                "risk_reason": self.risk_reason,
                "cooldown_remaining_s": self.cooldown_remaining_s,
                "daily_pnl": self.daily_pnl,
                "slippage_stats": dict(self.slippage_stats),
            }
        return snap

    def update(self, **kwargs) -> None:
        """Thread-safely assign any subset of public state fields.

        Unknown names and underscore-prefixed names are silently ignored,
        so a stale caller can never inject new attributes or touch the lock.
        """
        with self._lock:
            for name, value in kwargs.items():
                if name.startswith("_") or not hasattr(self, name):
                    continue
                setattr(self, name, value)
+ """ + if end_ms is None: + end_ms = int(time.time() * 1000) + if start_ms is None: + start_ms = end_ms - 7 * 24 * 3600 * 1000 # default: last 7 days + + # --- Paper trade metrics --- + paper_trades = db.get_closed_trades_in_range(start_ms, end_ms) + paper_trades = [t for t in paper_trades if t["symbol"] == symbol] + paper_stats = _compute_stats(paper_trades, "Paper") + + # --- Backtest over the same range --- + bt_config = config.backtest or BacktestConfig() + engine = BacktestEngine(config, bt_config) + bt_result = engine.run( + db, symbol, start_time_ms=start_ms, end_time_ms=end_ms, + ) + bt_trades = bt_result.trades + bt_stats = _compute_stats(bt_trades, "Backtest") + + # --- Format comparison --- + lines = [ + f"=== Paper vs Backtest Comparison: {symbol} ===", + f"Period: {_fmt_ms(start_ms)} → {_fmt_ms(end_ms)}", + "", + f"{'Metric':<20} {'Paper':>12} {'Backtest':>12} {'Delta':>12}", + "-" * 58, + ] + + for key in ["trades", "win_rate_pct", "net_pnl", "avg_pnl", "avg_hold_h"]: + p_val = paper_stats.get(key, 0) + b_val = bt_stats.get(key, 0) + delta = p_val - b_val + if key == "win_rate_pct": + lines.append( + f"{key:<20} {p_val:>11.1f}% {b_val:>11.1f}% {delta:>+11.1f}%" + ) + elif key == "trades": + lines.append( + f"{key:<20} {p_val:>12d} {b_val:>12d} {delta:>+12d}" + ) + else: + lines.append( + f"{key:<20} {p_val:>11.2f}$ {b_val:>11.2f}$ {delta:>+11.2f}$" + ) + + # --- Verdict --- + lines.append("") + if not paper_trades: + lines.append("Verdict: No paper trades in this period — nothing to compare.") + elif not bt_trades: + lines.append("Verdict: No backtest trades generated — check data availability.") + else: + pnl_delta_pct = abs( + (paper_stats["net_pnl"] - bt_stats["net_pnl"]) + / max(abs(bt_stats["net_pnl"]), 1) + * 100 + ) + wr_delta = abs( + paper_stats["win_rate_pct"] - bt_stats["win_rate_pct"] + ) + if pnl_delta_pct > 50 or wr_delta > 20: + lines.append( + "Verdict: SIGNIFICANT DIVERGENCE — paper results differ " + "materially from backtest. 
Investigate execution quality." + ) + elif pnl_delta_pct > 25 or wr_delta > 10: + lines.append( + "Verdict: MODERATE DIVERGENCE — some difference between " + "paper and backtest. Monitor closely." + ) + else: + lines.append( + "Verdict: CONSISTENT — paper trading aligns with " + "backtest expectations." + ) + + return "\n".join(lines) + + +def _compute_stats(trades: list, label: str) -> dict: + """Compute aggregate statistics from trade dicts or TradeRecord dataclasses.""" + if not trades: + return { + "trades": 0, + "win_rate_pct": 0.0, + "net_pnl": 0.0, + "avg_pnl": 0.0, + "avg_hold_h": 0.0, + } + + pnls = [_get(t, "pnl", 0) for t in trades] + wins = sum(1 for p in pnls if p > 0) + net = sum(pnls) + + hold_hours = [] + for t in trades: + entry = _get(t, "entry_time", 0) or _get(t, "entry_time_ms", 0) + exit_ = _get(t, "exit_time", 0) or _get(t, "exit_time_ms", 0) + if entry and exit_: + hold_hours.append((exit_ - entry) / 3_600_000) + + return { + "trades": len(trades), + "win_rate_pct": wins / len(trades) * 100, + "net_pnl": net, + "avg_pnl": net / len(trades), + "avg_hold_h": sum(hold_hours) / len(hold_hours) if hold_hours else 0, + } + + +def _get(obj, key: str, default=None): + """Get an attribute from a dict or dataclass.""" + if isinstance(obj, dict): + return obj.get(key, default) or default + return getattr(obj, key, default) or default + + +def _fmt_ms(ms: int) -> str: + """Format epoch ms as a human-readable date string.""" + import datetime + + dt = datetime.datetime.fromtimestamp(ms / 1000, tz=datetime.timezone.utc) + return dt.strftime("%Y-%m-%d %H:%M UTC") diff --git a/src/perp_bot/reporting/weekly.py b/src/perp_bot/reporting/weekly.py new file mode 100644 index 0000000..9f224aa --- /dev/null +++ b/src/perp_bot/reporting/weekly.py @@ -0,0 +1,89 @@ +"""Weekly performance report — aggregates trade data for review.""" + +from __future__ import annotations + +import math +import time + +from perp_bot.data.db import Database + + +def 
generate_weekly_report(db: Database, weeks: int = 1) -> str: + """Generate a performance summary for the last N weeks of closed trades. + + Returns a formatted text report. + """ + now_ms = int(time.time() * 1000) + week_ms = 7 * 24 * 3600 * 1000 + start_ms = now_ms - weeks * week_ms + + trades = db.get_closed_trades_in_range(start_ms, now_ms) + + if not trades: + return f"No closed trades in the last {weeks} week(s)." + + pnls = [t.get("pnl", 0) or 0 for t in trades] + wins = [p for p in pnls if p > 0] + losses = [p for p in pnls if p <= 0] + + total_pnl = sum(pnls) + win_rate = len(wins) / len(pnls) * 100 if pnls else 0 + avg_win = sum(wins) / len(wins) if wins else 0 + avg_loss = sum(losses) / len(losses) if losses else 0 + + # Sharpe ratio (annualised, assuming daily returns) + sharpe = _compute_sharpe(pnls) + + # Max drawdown + max_dd = _compute_max_drawdown(pnls) + + # Exit reason breakdown + reasons: dict[str, int] = {} + for t in trades: + r = t.get("exit_reason", "unknown") or "unknown" + reasons[r] = reasons.get(r, 0) + 1 + + lines = [ + f"=== Weekly Performance Report ({weeks}w) ===", + f"Period: {weeks} week(s) ending now", + f"Total trades: {len(trades)}", + f"Win rate: {win_rate:.1f}%", + f"Net P&L: ${total_pnl:.2f}", + f"Avg win: ${avg_win:.2f}", + f"Avg loss: ${avg_loss:.2f}", + f"Sharpe (ann): {sharpe:.2f}", + f"Max drawdown: ${max_dd:.2f}", + "", + "Exit reasons:", + ] + for reason, count in sorted(reasons.items(), key=lambda x: -x[1]): + lines.append(f" {reason}: {count}") + + return "\n".join(lines) + + +def _compute_sharpe(pnls: list[float], risk_free: float = 0.0) -> float: + """Annualised Sharpe ratio from a list of per-trade P&Ls.""" + if len(pnls) < 2: + return 0.0 + mean = sum(pnls) / len(pnls) - risk_free + std = math.sqrt(sum((p - mean) ** 2 for p in pnls) / (len(pnls) - 1)) + if std == 0: + return 0.0 + # Annualise assuming ~1 trade/day, 365 trading days + return (mean / std) * math.sqrt(365) + + +def _compute_max_drawdown(pnls: 
list[float]) -> float: + """Maximum drawdown from cumulative P&L curve.""" + if not pnls: + return 0.0 + cumulative = 0.0 + peak = 0.0 + max_dd = 0.0 + for p in pnls: + cumulative += p + peak = max(peak, cumulative) + dd = peak - cumulative + max_dd = max(max_dd, dd) + return max_dd diff --git a/src/perp_bot/risk/__init__.py b/src/perp_bot/risk/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/src/perp_bot/risk/manager.py b/src/perp_bot/risk/manager.py new file mode 100644 index 0000000..56387b8 --- /dev/null +++ b/src/perp_bot/risk/manager.py @@ -0,0 +1,113 @@ +"""Risk manager — enforces all risk limits before allowing trades.""" + +from __future__ import annotations + +import logging +import time +from dataclasses import dataclass + +from perp_bot.config import BotConfig +from perp_bot.data.db import Database +from perp_bot.signals.prediction import PredictionRegime + +logger = logging.getLogger(__name__) + + +@dataclass +class RiskCheck: + allowed: bool + reason: str + + +class RiskManager: + def __init__(self, config: BotConfig, db: Database) -> None: + self.config = config + self.risk = config.risk + self.trading = config.trading + self.db = db + self._last_stop_loss_time: int | None = self._load_cooldown_state() + + def check_entry(self) -> RiskCheck: + """Run all pre-entry risk checks. 
Returns whether a new trade is allowed.""" + # Max positions + open_trades = self.db.get_open_trades() + if len(open_trades) >= self.risk.max_positions: + return RiskCheck(False, f"max_positions ({self.risk.max_positions}) reached") + + # Daily loss limit + day_start = _day_start_ms() + daily_pnl = self.db.get_daily_pnl(day_start) + max_daily_loss = self.trading.capital_usd * self.risk.daily_loss_limit_pct + if daily_pnl <= -max_daily_loss: + return RiskCheck(False, f"daily_loss_limit hit ({daily_pnl:.2f})") + + # Cooldown after stop-loss + if self._last_stop_loss_time is not None: + elapsed = int(time.time() * 1000) - self._last_stop_loss_time + if elapsed < self.risk.cooldown_seconds * 1000: + remaining = (self.risk.cooldown_seconds * 1000 - elapsed) / 1000 + return RiskCheck(False, f"cooldown active ({remaining:.0f}s remaining)") + + return RiskCheck(True, "all_checks_passed") + + def compute_position_size( + self, prediction_regime: PredictionRegime = PredictionRegime.NORMAL, + ) -> float: + """Compute position size in USD respecting margin usage limit. + + Reduces size under HIGH_RISK regime, blocks entirely under CRISIS. + """ + if prediction_regime == PredictionRegime.CRISIS: + return 0.0 + max_margin = self.trading.capital_usd * self.trading.margin_usage_limit + size = max_margin * self.trading.leverage + if prediction_regime == PredictionRegime.HIGH_RISK and self.config.prediction: + size *= self.config.prediction.position_size_reduction + return size + + def check_stop_loss(self, entry_price: float, current_price: float, side: str) -> bool: + """Check if the capital-based stop-loss threshold is breached. + + Returns True if the position should be stopped out. 
+ """ + position_size = self.compute_position_size() + if side == "long": + pnl = (current_price - entry_price) / entry_price * position_size + else: + pnl = (entry_price - current_price) / entry_price * position_size + + max_loss = self.trading.capital_usd * self.risk.max_loss_per_trade_pct + return pnl <= -max_loss + + def check_position_timeout(self, entry_time: int) -> bool: + """Check if position has exceeded the maximum holding time.""" + elapsed_hours = (int(time.time() * 1000) - entry_time) / 3_600_000 + return elapsed_hours >= self.risk.position_timeout_hours + + def record_stop_loss(self) -> None: + """Record that a stop-loss was hit, starting the cooldown timer.""" + self._last_stop_loss_time = int(time.time() * 1000) + self.db.set_state( + "last_stop_loss_time_ms", str(self._last_stop_loss_time) + ) + logger.warning("Stop-loss triggered — cooldown started for %ds", self.risk.cooldown_seconds) + + def _load_cooldown_state(self) -> int | None: + """Load persisted cooldown timestamp from DB.""" + val = self.db.get_state("last_stop_loss_time_ms") + if val is not None: + ts = int(val) + elapsed = int(time.time() * 1000) - ts + if elapsed < self.risk.cooldown_seconds * 1000: + logger.info( + "Restored cooldown state — %ds remaining", + (self.risk.cooldown_seconds * 1000 - elapsed) / 1000, + ) + return ts + return None + + +def _day_start_ms() -> int: + """Return epoch ms for the start of the current UTC day.""" + now = int(time.time()) + return (now - now % 86400) * 1000 diff --git a/src/perp_bot/signals/__init__.py b/src/perp_bot/signals/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/src/perp_bot/signals/engine.py b/src/perp_bot/signals/engine.py new file mode 100644 index 0000000..332f53f --- /dev/null +++ b/src/perp_bot/signals/engine.py @@ -0,0 +1,164 @@ +"""Signal engine — combines all indicators to produce entry/exit signals.""" + +from __future__ import annotations + +from dataclasses import dataclass +from enum import Enum + 
import pandas as pd

from perp_bot.config import SignalConfig
from perp_bot.signals.indicators import adx, bollinger_bands, rsi, zscore
from perp_bot.signals.prediction import PredictionRegime


class Signal(Enum):
    """Action the strategy should take on the next tick."""

    LONG = "long"
    SHORT = "short"
    CLOSE = "close"
    NONE = "none"


@dataclass
class SignalResult:
    """Outcome of one evaluation: the chosen action plus the inputs behind it."""

    signal: Signal
    reason: str
    zscore_value: float
    rsi_value: float
    adx_value: float
    price: float
    prediction_regime: PredictionRegime = PredictionRegime.NORMAL


class SignalEngine:
    """Mean-reversion signal generator: Z-score + Bollinger + RSI, ADX filter."""

    def __init__(self, config: SignalConfig) -> None:
        self.config = config

    def compute_indicators(self, df: pd.DataFrame) -> pd.DataFrame:
        """Add all indicator columns to a candle DataFrame.

        Expects df with columns: open, high, low, close, volume
        """
        cfg = self.config
        out = df.copy()

        out["zscore"] = zscore(out["close"], cfg.zscore_lookback)
        bands = bollinger_bands(
            out["close"], cfg.bollinger_period, cfg.bollinger_std
        )
        out["bb_mid"], out["bb_upper"], out["bb_lower"] = bands
        out["rsi"] = rsi(out["close"], cfg.rsi_period)
        out["adx"] = adx(out["high"], out["low"], out["close"], cfg.adx_period)

        return out

    def evaluate(
        self,
        df: pd.DataFrame,
        position_side: str | None = None,
        prediction_regime: PredictionRegime = PredictionRegime.NORMAL,
        preferred_side: str | None = None,
    ) -> SignalResult:
        """Evaluate the latest candle for entry/exit signals.

        Args:
            df: DataFrame with indicator columns (from compute_indicators)
            position_side: "long", "short", or None if no position
            prediction_regime: current macro regime from prediction markets
            preferred_side: "long" or "short" funding preference, or None
        """
        cfg = self.config
        row = df.iloc[-1]
        z = row["zscore"]
        r = row["rsi"]
        a = row["adx"]
        price = row["close"]

        common = dict(
            zscore_value=z,
            rsi_value=r,
            adx_value=a,
            price=price,
            prediction_regime=prediction_regime,
        )

        def _result(signal: Signal, reason: str) -> SignalResult:
            # Every result carries the same indicator snapshot.
            return SignalResult(signal=signal, reason=reason, **common)

        # Regime-adjusted thresholds.
        entry_threshold = cfg.zscore_entry_threshold
        exit_threshold = cfg.zscore_exit_threshold
        if prediction_regime == PredictionRegime.HIGH_RISK:
            # Stricter entry, take profits earlier.
            entry_threshold, exit_threshold = 2.5, 0.5
        elif prediction_regime == PredictionRegime.CRISIS and position_side is None:
            # Crisis: block all new entries but still allow exits below.
            return _result(Signal.NONE, f"crisis_regime_block (z={z:.2f})")

        # Exits take precedence over entries.
        if position_side is not None:
            # Profit exit: Z-score has reverted to the mean.
            if abs(z) < exit_threshold:
                return _result(
                    Signal.CLOSE, f"mean_reversion_complete (z={z:.2f})"
                )
            # Stop exit: Z-score extended further against the position.
            if position_side == "long" and z < -cfg.zscore_stop_threshold:
                return _result(Signal.CLOSE, f"zscore_stop_long (z={z:.2f})")
            if position_side == "short" and z > cfg.zscore_stop_threshold:
                return _result(Signal.CLOSE, f"zscore_stop_short (z={z:.2f})")

        # Entries: only when flat and the market is range-bound (low ADX).
        if position_side is None and a < cfg.adx_threshold:
            long_thr = short_thr = entry_threshold
            if prediction_regime == PredictionRegime.DOVISH_SHIFT:
                long_thr *= 0.9  # relax long entry slightly
            elif prediction_regime == PredictionRegime.HAWKISH_SHIFT:
                short_thr *= 0.9  # relax short entry slightly

            # Funding preference relaxation.
            if preferred_side == "long":
                long_thr = min(long_thr, entry_threshold * 0.9)
            elif preferred_side == "short":
                short_thr = min(short_thr, entry_threshold * 0.9)

            if z > short_thr and r > cfg.rsi_overbought and price > row["bb_upper"]:
                return _result(
                    Signal.SHORT,
                    f"short_entry (z={z:.2f}, rsi={r:.1f}, adx={a:.1f})",
                )
            if z < -long_thr and r < cfg.rsi_oversold and price < row["bb_lower"]:
                return _result(
                    Signal.LONG,
                    f"long_entry (z={z:.2f}, rsi={r:.1f}, adx={a:.1f})",
                )

        return _result(Signal.NONE, "no_signal")
+""" + +from __future__ import annotations + +import numpy as np +import pandas as pd + + +def zscore(series: pd.Series, lookback: int) -> pd.Series: + """Rolling Z-score: (price - mean) / std.""" + mean = series.rolling(lookback).mean() + std = series.rolling(lookback).std(ddof=0) + return (series - mean) / std.replace(0, np.nan) + + +def bollinger_bands( + series: pd.Series, period: int, num_std: float +) -> tuple[pd.Series, pd.Series, pd.Series]: + """Returns (middle, upper, lower) Bollinger Bands.""" + middle = series.rolling(period).mean() + std = series.rolling(period).std(ddof=0) + upper = middle + num_std * std + lower = middle - num_std * std + return middle, upper, lower + + +def rsi(series: pd.Series, period: int) -> pd.Series: + """RSI using Wilder's smoothing (EMA with alpha=1/period).""" + delta = series.diff() + gain = delta.clip(lower=0) + loss = (-delta).clip(lower=0) + + # Wilder smoothing = EMA with alpha = 1/period + alpha = 1.0 / period + avg_gain = gain.ewm(alpha=alpha, min_periods=period, adjust=False).mean() + avg_loss = loss.ewm(alpha=alpha, min_periods=period, adjust=False).mean() + + # When avg_loss is 0 (pure uptrend), RSI = 100 + rs = avg_gain / avg_loss.replace(0, np.nan) + result = 100.0 - (100.0 / (1.0 + rs)) + # Fill NaN from zero-loss (pure uptrend) with 100, zero-gain (pure downtrend) with 0 + result = result.fillna(pd.Series( + np.where(avg_loss == 0, 100.0, np.where(avg_gain == 0, 0.0, np.nan)), + index=result.index, + )) + return result + + +def hurst_exponent(series: pd.Series, max_lag: int = 100) -> float: + """Hurst exponent via the rescaled range (R/S) method. + + H < 0.5 → mean-reverting (good for this strategy) + H = 0.5 → random walk + H > 0.5 → trending + + Args: + series: Price or log-return series. + max_lag: Maximum lag for R/S calculation. + + Returns: + Estimated Hurst exponent as a float. 
+ """ + series = series.dropna().values + n = len(series) + if n < 20: + return 0.5 # Not enough data — assume random walk + + # Use log returns for stationarity + log_returns = np.diff(np.log(series)) + + # Compute R/S for various lag sizes + lags = [] + rs_values = [] + for lag in range(10, min(max_lag, n // 2) + 1): + rs_list = [] + for start in range(0, len(log_returns) - lag + 1, lag): + chunk = log_returns[start : start + lag] + mean_chunk = np.mean(chunk) + deviations = np.cumsum(chunk - mean_chunk) + r = np.max(deviations) - np.min(deviations) + s = np.std(chunk, ddof=1) + if s > 0: + rs_list.append(r / s) + if rs_list: + lags.append(lag) + rs_values.append(np.mean(rs_list)) + + if len(lags) < 2: + return 0.5 + + # Linear regression of log(R/S) on log(lag) + log_lags = np.log(lags) + log_rs = np.log(rs_values) + poly = np.polyfit(log_lags, log_rs, 1) + return float(poly[0]) + + +def adx( + high: pd.Series, low: pd.Series, close: pd.Series, period: int +) -> pd.Series: + """Average Directional Index (ADX) using Wilder's smoothing. + + Measures trend strength: < 25 = range-bound, > 25 = trending. 
+ """ + # True Range + prev_close = close.shift(1) + tr = pd.concat( + [high - low, (high - prev_close).abs(), (low - prev_close).abs()], + axis=1, + ).max(axis=1) + + # Directional Movement + up_move = high - high.shift(1) + down_move = low.shift(1) - low + plus_dm = np.where((up_move > down_move) & (up_move > 0), up_move, 0.0) + minus_dm = np.where((down_move > up_move) & (down_move > 0), down_move, 0.0) + + plus_dm = pd.Series(plus_dm, index=high.index) + minus_dm = pd.Series(minus_dm, index=high.index) + + # Wilder smoothing + alpha = 1.0 / period + atr = tr.ewm(alpha=alpha, min_periods=period, adjust=False).mean() + smooth_plus_dm = plus_dm.ewm(alpha=alpha, min_periods=period, adjust=False).mean() + smooth_minus_dm = minus_dm.ewm(alpha=alpha, min_periods=period, adjust=False).mean() + + # Directional Indicators + plus_di = 100.0 * smooth_plus_dm / atr.replace(0, np.nan) + minus_di = 100.0 * smooth_minus_dm / atr.replace(0, np.nan) + + # ADX + dx = 100.0 * (plus_di - minus_di).abs() / (plus_di + minus_di).replace(0, np.nan) + return dx.ewm(alpha=alpha, min_periods=period, adjust=False).mean() diff --git a/src/perp_bot/signals/prediction.py b/src/perp_bot/signals/prediction.py new file mode 100644 index 0000000..bfa7163 --- /dev/null +++ b/src/perp_bot/signals/prediction.py @@ -0,0 +1,92 @@ +"""Prediction market scoring — regime classification and funding side preference.""" + +from __future__ import annotations + +from enum import Enum + +from perp_bot.config import PredictionConfig + + +class PredictionRegime(Enum): + NORMAL = "normal" + HIGH_RISK = "high_risk" + DOVISH_SHIFT = "dovish_shift" + HAWKISH_SHIFT = "hawkish_shift" + CRISIS = "crisis" + + +def war_risk_score(snapshots: list[dict], market_weights: dict[str, float]) -> float: + """Compute weighted average war risk probability. 
def war_risk_score(snapshots: list[dict], market_weights: dict[str, float]) -> float:
    """Compute weighted average war risk probability.

    Args:
        snapshots: list of prediction snapshot dicts with 'market_slug' and 'probability'
        market_weights: mapping of market_slug → weight for war_risk markets

    Returns:
        Weighted average probability (0.0 to 1.0), or 0.0 if no data.
    """
    # Keep only snapshots whose market has a configured weight.
    contributions = [
        (snap["probability"] * market_weights[snap["market_slug"]],
         market_weights[snap["market_slug"]])
        for snap in snapshots
        if snap["market_slug"] in market_weights
    ]
    weighted_sum = sum(value for value, _ in contributions)
    total_weight = sum(weight for _, weight in contributions)
    return weighted_sum / total_weight if total_weight > 0 else 0.0


def rate_change_score(snapshots: list[dict]) -> float:
    """Compute net rate direction from rate_change prediction markets.

    Convention:
    - probability > 0.5 → market expects a rate hike → positive score
    - probability < 0.5 → market expects a rate cut → negative score

    Returns:
        Score from -1.0 (strong cut expectation) to +1.0 (strong hike expectation).
        0.0 if no data.
    """
    if not snapshots:
        return 0.0
    # Average probability across rate_change markets, then centre on 0.5
    # and rescale [0, 1] → [-1, 1].
    avg_prob = sum(s["probability"] for s in snapshots) / len(snapshots)
    return (avg_prob - 0.5) * 2.0


def compute_regime(
    war_risk: float,
    rate_change: float,
    config: PredictionConfig,
) -> PredictionRegime:
    """Determine the prediction regime from current scores.

    Priority: CRISIS > HIGH_RISK > DOVISH/HAWKISH > NORMAL
    """
    # Ordered checks — the first that fires wins, encoding the priority above.
    checks = (
        (war_risk >= config.war_risk_crisis_threshold, PredictionRegime.CRISIS),
        (war_risk >= config.war_risk_threshold, PredictionRegime.HIGH_RISK),
        (rate_change < -config.rate_change_threshold, PredictionRegime.DOVISH_SHIFT),
        (rate_change > config.rate_change_threshold, PredictionRegime.HAWKISH_SHIFT),
    )
    return next(
        (regime for hit, regime in checks if hit), PredictionRegime.NORMAL
    )


def funding_side_preference(rate_change: float, threshold: float) -> str | None:
    """Suggest a preferred trading side based on rate expectations.

    - Dovish (rate cuts expected) → prefer long (assets tend to rise)
    - Hawkish (rate hikes expected) → prefer short (assets tend to fall)
    - Neutral → no preference

    Returns:
        "long", "short", or None
    """
    if rate_change < -threshold:
        return "long"
    return "short" if rate_change > threshold else None
# ==== new files: src/perp_bot/tui/__init__.py, src/perp_bot/tui/app.py ====
"""Main TUI application — attaches to a running daemon for live monitoring."""

from __future__ import annotations

import sqlite3
import time
from pathlib import Path

from textual.app import App, ComposeResult
from textual.binding import Binding
from textual.screen import ModalScreen
from textual.widgets import Footer, OptionList
from textual.widgets.option_list import Option

from perp_bot.config import BotConfig
from perp_bot.ipc.client import DaemonClient
from perp_bot.ipc.protocol import get_socket_path
from perp_bot.tui.widgets.header import HeaderWidget
from perp_bot.tui.widgets.log import LogWidget
from perp_bot.tui.widgets.position import PositionWidget
from perp_bot.tui.widgets.risk import RiskWidget
from perp_bot.tui.widgets.signals import SignalsWidget
from perp_bot.tui.widgets.trades import TradesWidget


class EmergencyCloseScreen(ModalScreen[str | None]):
    """Modal symbol picker for emergency close.

    Dismisses with the chosen symbol, or None when cancelled.
    """

    DEFAULT_CSS = """
    EmergencyCloseScreen {
        align: center middle;
    }
    EmergencyCloseScreen > OptionList {
        width: 40;
        height: auto;
        max-height: 12;
        border: heavy red;
        background: $surface;
    }
    """

    BINDINGS = [Binding("escape", "cancel", "Cancel")]

    def __init__(self, symbols: list[str], **kwargs) -> None:
        """Store the tradable symbols to offer in the picker."""
        super().__init__(**kwargs)
        self._symbols = symbols

    def compose(self) -> ComposeResult:
        # One option per symbol; the option id carries the symbol back out.
        options = [Option(f"CLOSE {s}", id=s) for s in self._symbols]
        yield OptionList(*options)

    def on_option_list_option_selected(
        self, event: OptionList.OptionSelected,
    ) -> None:
        self.dismiss(event.option.id)

    def action_cancel(self) -> None:
        self.dismiss(None)


class PerpBotApp(App):
    """Terminal UI for monitoring the perp-bot daemon.

    State arrives over the daemon's Unix socket (DaemonClient); trades are
    read from the SQLite DB in read-only mode; the log file is tailed.
    """

    CSS_PATH = "app.tcss"
    TITLE = "perp-bot"

    BINDINGS = [
        Binding("p", "pause", "Pause"),
        Binding("r", "resume", "Resume"),
        Binding("e", "emergency_close", "Emergency Close"),
        Binding("q", "quit", "Quit"),
    ]

    def __init__(self, config: BotConfig, **kwargs) -> None:
        """Wire up paths and the IPC client from the bot config."""
        super().__init__(**kwargs)
        self._config = config
        self._db_path = Path(config.data.db_path)
        self._socket_path = get_socket_path(config.data.db_path)
        # Log file lives next to the socket.
        self._log_path = self._socket_path.parent / "perp-bot.log"
        self._client = DaemonClient(self._socket_path)
        self._last_state: dict | None = None

    def compose(self) -> ComposeResult:
        yield HeaderWidget()
        yield PositionWidget(id="position")
        yield SignalsWidget(id="signals")
        yield RiskWidget(id="risk")
        yield TradesWidget(id="trades")
        yield LogWidget(self._log_path, id="log")
        yield Footer()

    def on_mount(self) -> None:
        # Staggered polling timers — daemon state is the most volatile,
        # closed trades change rarely.
        self.set_interval(2.0, self._poll_daemon)
        self.set_interval(15.0, self._poll_trades)
        self.set_interval(3.0, self._poll_log)
        # Initial fetch
        self._poll_daemon()
        self._poll_trades()

    def _poll_daemon(self) -> None:
        """Fetch daemon state via socket and update volatile widgets."""
        # Runs in a worker thread so the socket round-trip never blocks the UI.
        self.run_worker(self._fetch_and_update_state, thread=True)

    def _fetch_and_update_state(self) -> None:
        # Worker thread: UI mutations must go through call_from_thread.
        state = self._client.get_state()
        self._last_state = state
        self.call_from_thread(self._apply_state, state)

    def _apply_state(self, state: dict | None) -> None:
        # Runs on the UI thread; state is None when the daemon is offline.
        self.query_one(HeaderWidget).update_state(state)
        self.query_one(SignalsWidget).update_state(state)
        self.query_one(RiskWidget).update_state(state)

        # Position needs open trades from DB
        open_trades = self._query_open_trades()
        self.query_one(PositionWidget).update_state(state, open_trades)

    def _poll_trades(self) -> None:
        """Read recent closed trades from DB."""
        self.run_worker(self._fetch_and_update_trades, thread=True)

    def _fetch_and_update_trades(self) -> None:
        trades = self._query_recent_trades()
        self.call_from_thread(self._apply_trades, trades)

    def _apply_trades(self, trades: list[dict]) -> None:
        self.query_one(TradesWidget).update_trades(trades)

    def _poll_log(self) -> None:
        """Tail the log file for new entries."""
        self.query_one(LogWidget).poll_log()

    # ── DB queries (read-only) ──────────────────────

    def _get_ro_connection(self) -> sqlite3.Connection:
        """Open a read-only SQLite connection."""
        # mode=ro guarantees the TUI can never write to the daemon's DB.
        uri = f"file:{self._db_path}?mode=ro"
        return sqlite3.connect(uri, uri=True, timeout=2)

    def _query_open_trades(self) -> list[dict]:
        # Best-effort: any DB error (missing file, locked, no table yet)
        # degrades to "no open trades" rather than crashing the UI.
        try:
            conn = self._get_ro_connection()
            cur = conn.execute(
                "SELECT * FROM trades WHERE exit_time IS NULL",
            )
            cols = [d[0] for d in cur.description]
            rows = [dict(zip(cols, row)) for row in cur.fetchall()]
            conn.close()
            return rows
        except Exception:
            return []

    def _query_recent_trades(self) -> list[dict]:
        # Best-effort, same rationale as _query_open_trades.
        try:
            conn = self._get_ro_connection()
            now_ms = int(time.time() * 1000)
            week_ms = 7 * 24 * 3600 * 1000
            cur = conn.execute(
                "SELECT * FROM trades WHERE exit_time IS NOT NULL"
                " AND exit_time >= ? ORDER BY exit_time DESC LIMIT 10",
                (now_ms - week_ms,),
            )
            cols = [d[0] for d in cur.description]
            rows = [dict(zip(cols, row)) for row in cur.fetchall()]
            conn.close()
            # Fetched newest-first; reverse so the widget shows oldest-first.
            return list(reversed(rows))
        except Exception:
            return []

    # ── Actions ─────────────────────────────────────

    def action_pause(self) -> None:
        result = self._client.pause()
        if result and result.get("ok"):
            self.notify("Daemon PAUSED", severity="warning")
        else:
            self.notify("Failed to pause (daemon offline?)", severity="error")

    def action_resume(self) -> None:
        result = self._client.resume()
        if result and result.get("ok"):
            self.notify("Daemon RESUMED", severity="information")
        else:
            self.notify("Failed to resume (daemon offline?)", severity="error")

    def action_emergency_close(self) -> None:
        """Emergency close — show symbol picker, then close selected."""
        symbols = self._config.trading.symbols
        if not symbols:
            self.notify("No symbols configured", severity="error")
            return
        self.push_screen(
            EmergencyCloseScreen(symbols),
            callback=self._on_emergency_symbol_selected,
        )

    def _on_emergency_symbol_selected(self, symbol: str | None) -> None:
        # None means the modal was cancelled.
        if symbol is None:
            return
        result = self._client.emergency_close(symbol)
        if result and result.get("ok"):
            closed = result.get("closed", 0)
            self.notify(
                f"Emergency close: {closed} position(s) for {symbol}",
                severity="warning",
            )
        else:
            err = result.get("error", "unknown") if result else "no response"
            self.notify(f"Emergency close failed: {err}", severity="error")


/* ==== new file: src/perp_bot/tui/app.tcss ==== */
/* Perp Bot TUI Layout */

Screen {
    layout: grid;
    grid-size: 2 4;
    grid-rows: 1 1fr 1fr 1fr;
    grid-columns: 1fr 1fr;
}

HeaderWidget {
    column-span: 2;
    height: 1;
    background: $surface;
}

PositionWidget {
    border: solid $primary;
    padding: 0 1;
    height: 100%;
}

SignalsWidget {
    border: solid $primary;
    padding: 0 1;
    height: 100%;
}

RiskWidget {
    border: solid $primary;
    padding: 0 1;
    height: 100%;
}

TradesWidget {
    border: solid $primary;
    height: 100%;
}

LogWidget {
    /* The log spans the full width of the bottom grid row. */
    column-span: 2;
    border: solid $accent;
    height: 100%;
}

Footer {
    column-span: 2;
}

# ==== new file: src/perp_bot/tui/widgets/__init__.py ====
"""TUI widget components."""

# ==== new file: src/perp_bot/tui/widgets/header.py ====
"""Header widget — mode badge, lead price, WS health, regime, uptime."""

from __future__ import annotations

from textual.widgets import Static


def _fmt_uptime(seconds: float) -> str:
    """Format an uptime in seconds as 'Hh MMm' (or 'Mm SSs' under an hour)."""
    h, rem = divmod(int(seconds), 3600)
    m, s = divmod(rem, 60)
    if h > 0:
        return f"{h}h {m:02d}m"
    return f"{m}m {s:02d}s"


class HeaderWidget(Static):
    """Top bar showing daemon overview."""

    DEFAULT_CSS = """
    HeaderWidget {
        dock: top;
        height: 1;
        background: $surface;
        color: $text;
        content-align: center middle;
    }
    """

    def update_state(self, state: dict | None) -> None:
        """Re-render the header from the daemon state dict (None = offline)."""
        if state is None:
            self.update("[bold red] DAEMON OFFLINE [/]")
            return

        mode = state.get("mode", "?")
        # Paper mode renders green, live red; a pause overrides both.
        mode_style = "green" if mode == "paper" else "red"
        if state.get("paused"):
            mode_style = "yellow"
            mode = f"{mode} PAUSED"

        ws = "[green]OK[/]" if state.get("ws_healthy") else "[red]STALE[/]"

        regime = state.get("prediction_regime", "NORMAL")
        regime_colors = {
            "NORMAL": "green", "HIGH_RISK": "yellow",
            "DOVISH_SHIFT": "yellow", "HAWKISH_SHIFT": "yellow",
            "CRISIS": "red",
        }
        rc = regime_colors.get(regime, "white")

        # Lead symbol price
        mid_prices = state.get("mid_prices", {})
        price_parts = []
        for sym, px in mid_prices.items():
            price_parts.append(f"{sym} ${px:,.2f}")
        price_str = " | ".join(price_parts) if price_parts else "---"

        uptime = _fmt_uptime(state.get("uptime_seconds", 0))

        self.update(
            f" PERP-BOT [{mode_style}]{mode}[/] | {price_str}"
            f" | WS:{ws} | [{rc}]{regime}[/] | {uptime}"
        )


# ==== new file: src/perp_bot/tui/widgets/log.py ====
"""Log widget — tails the daemon log file."""

from __future__ import annotations

from pathlib import Path

from textual.widgets import RichLog


class LogWidget(RichLog):
    """Tails the daemon's JSON log file and displays recent entries."""

    BORDER_TITLE = "LOG"

    def __init__(self, log_path: Path | str, **kwargs) -> None:
        """Remember the log path and seek near the current end of the file."""
        super().__init__(max_lines=200, wrap=True, markup=True, **kwargs)
        self._log_path = Path(log_path)
        self._last_pos: int = 0
        # Start from near end of file
        if self._log_path.exists():
            size = self._log_path.stat().st_size
            self._last_pos = max(0, size - 4096)

    def poll_log(self) -> None:
        """Read new lines from the log file since last poll.

        NOTE(review): if the log file is rotated/truncated, _last_pos can
        exceed the new size and the widget silently stops updating — consider
        resetting _last_pos when stat().st_size < _last_pos.
        """
        if not self._log_path.exists():
            return

        try:
            with open(self._log_path) as f:
                f.seek(self._last_pos)
                new_data = f.read()
                self._last_pos = f.tell()
        except OSError:
            return

        if not new_data:
            return

        import json
        for line in new_data.strip().split("\n"):
            if not line:
                continue
            try:
                entry = json.loads(line)
                ts = entry.get("ts", "")
                # Extract HH:MM:SS from ISO timestamp
                time_part = ts.split("T")[1][:8] if "T" in ts else ts[:8]
                level = entry.get("level", "INFO")
                msg = entry.get("msg", line)

                level_colors = {
                    "WARNING": "yellow",
                    "ERROR": "red",
                    "CRITICAL": "bold red",
                }
                color = level_colors.get(level, "white")
                self.write(f"[dim]{time_part}[/] [{color}]{msg}[/]")
            except (json.JSONDecodeError, KeyError):
                # Non-JSON line (e.g. a traceback) — show it verbatim.
                self.write(line)


# ==== new file: src/perp_bot/tui/widgets/position.py ====
"""Position widget — current open position with live unrealised PnL."""

from __future__ import annotations

from textual.widgets import Static


class PositionWidget(Static):
    """Displays the current open position, or 'FLAT' if none."""

    BORDER_TITLE = "POSITION"

    def update_state(self, state: dict | None, open_trades: list[dict] | None = None) -> None:
        """Render the first open trade with uPnL derived from mid prices."""
        if state is None:
            self.update("[dim]Daemon offline[/]")
            return

        if not open_trades:
            self.update("[dim]FLAT — no open position[/]")
            return

        # Only the first open trade is shown (max_positions is 1 by config).
        trade = open_trades[0]
        symbol = trade.get("symbol", "?")
        side = trade.get("side", "?")
        entry_price = trade.get("entry_price", 0)
        size_usd = trade.get("size_usd", 0)

        # Calculate unrealised PnL from current mid price
        mid_prices = state.get("mid_prices", {})
        current_price = mid_prices.get(symbol)
        upnl_str = "[dim]---[/]"
        pct_str = ""

        if current_price and entry_price > 0:
            if side == "long":
                upnl = (current_price - entry_price) / entry_price * size_usd
            else:
                upnl = (entry_price - current_price) / entry_price * size_usd
            pct = upnl / size_usd * 100 if size_usd else 0
            color = "green" if upnl >= 0 else "red"
            sign = "+" if upnl >= 0 else ""
            upnl_str = f"[{color}]{sign}${upnl:.2f}[/]"
            pct_str = f" [{color}]({sign}{pct:.2f}%)[/]"

        # Duration
        import time
        entry_time = trade.get("entry_time", 0)  # epoch ms
        if entry_time:
            elapsed_s = int(time.time() * 1000 - entry_time) // 1000
            h, rem = divmod(elapsed_s, 3600)
            m, _ = divmod(rem, 60)
            duration = f"{h}h {m:02d}m" if h else f"{m}m"
        else:
            duration = "---"

        side_color = "green" if side == "long" else "red"
        lines = [
            f"[{side_color}]{side.upper()}[/] {symbol} @ {entry_price:.2f}",
            f"Size: ${size_usd:.0f}",
            f"uPnL: {upnl_str}{pct_str}",
            f"Duration: {duration}",
        ]
        self.update("\n".join(lines))


# ==== new file: src/perp_bot/tui/widgets/risk.py ====
"""Risk widget — entry check status, daily PnL, cooldown timer."""

from __future__ import annotations

from textual.widgets import Static


class RiskWidget(Static):
    """Displays risk management state."""

    BORDER_TITLE = "RISK"

    def update_state(self, state: dict | None, daily_pnl: float = 0.0) -> None:
        """Render entry permission, daily PnL, cooldown and slippage stats.

        Args:
            state: daemon state dict, or None when offline.
            daily_pnl: fallback PnL used only when the state omits it.
        """
        if state is None:
            self.update("[dim]Daemon offline[/]")
            return

        allowed = state.get("risk_allowed", False)
        reason = state.get("risk_reason", "unknown")
        cooldown = state.get("cooldown_remaining_s", 0.0)
        pnl = state.get("daily_pnl", daily_pnl)

        # Entry status
        if allowed:
            entry_str = "[green]ALLOWED[/]"
        else:
            entry_str = f"[red]BLOCKED[/] ({reason})"

        # Daily PnL
        pnl_color = "green" if pnl >= 0 else "red"
        sign = "+" if pnl >= 0 else ""
        pnl_str = f"[{pnl_color}]{sign}${pnl:.2f}[/]"

        # Cooldown
        if cooldown > 0:
            m, s = divmod(int(cooldown), 60)
            cd_str = f"[yellow]{m}m {s:02d}s[/]"
        else:
            cd_str = "[dim]--[/]"

        # Slippage
        slip = state.get("slippage_stats", {})
        if slip.get("count", 0) > 0:
            slip_str = (
                f"avg={slip['avg_pct']:.4f}% "
                f"max={slip['max_pct']:.4f}% "
                f"({slip['count']} fills)"
            )
        else:
            slip_str = "[dim]no data[/]"

        lines = [
            f"Entry: {entry_str}",
            f"Daily PnL: {pnl_str}",
            f"Cooldown: {cd_str}",
            f"Slippage: {slip_str}",
        ]
        self.update("\n".join(lines))


# ==== new file: src/perp_bot/tui/widgets/signals.py ====
"""Signals widget — Z-score, RSI, ADX values with visual bars."""

from __future__ import annotations
from textual.widgets import Static


def _bar(value: float, lo: float, hi: float, width: int = 10) -> str:
    """Render a simple ASCII gauge bar.

    Values outside [lo, hi] are clamped before scaling to `width` cells.
    """
    clamped = max(lo, min(hi, value))
    filled = int((clamped - lo) / (hi - lo) * width)
    return "[" + "=" * filled + "-" * (width - filled) + "]"


class SignalsWidget(Static):
    """Displays the latest signal indicators for each symbol."""

    BORDER_TITLE = "SIGNALS"

    def update_state(self, state: dict | None) -> None:
        """Render Z-score/RSI/ADX gauges and the signal badge per symbol."""
        if state is None:
            self.update("[dim]Daemon offline[/]")
            return

        signals = state.get("latest_signals", {})
        if not signals:
            self.update("[dim]No signal data yet[/]")
            return

        lines = []
        for symbol, sig in signals.items():
            z = sig.get("zscore", 0)
            r = sig.get("rsi", 50)
            a = sig.get("adx", 0)
            signal_val = sig.get("signal", "none")

            # Colour-code z-score
            z_color = "green" if abs(z) < 1.0 else "yellow" if abs(z) < 2.0 else "red"

            # RSI colour
            r_color = "red" if r > 70 else "green" if r < 30 else "white"

            # ADX colour (low = range-bound = good for mean-reversion)
            a_color = "green" if a < 25 else "yellow" if a < 40 else "red"

            # Signal badge
            sig_badges = {
                "long": "[green bold]LONG[/]",
                "short": "[red bold]SHORT[/]",
                "close": "[yellow bold]CLOSE[/]",
                "none": "[dim]NONE[/]",
            }
            badge = sig_badges.get(signal_val, f"[dim]{signal_val}[/]")

            lines.append(f"[bold]{symbol}[/]")
            lines.append(
                f" Z-Score: [{z_color}]{z:+.2f}[/] {_bar(abs(z), 0, 4)}"
            )
            lines.append(
                f" RSI: [{r_color}]{r:.1f}[/] {_bar(r, 0, 100)}"
            )
            lines.append(
                f" ADX: [{a_color}]{a:.1f}[/] {_bar(a, 0, 60)}"
            )
            lines.append(f" Signal: {badge}")

        self.update("\n".join(lines))


# ==== new file: src/perp_bot/tui/widgets/trades.py ====
"""Trades widget — recent closed trades in a DataTable."""

from __future__ import annotations

from textual.widgets import DataTable


class TradesWidget(DataTable):
    """Shows the last N closed trades."""

    BORDER_TITLE = "RECENT TRADES"

    def on_mount(self) -> None:
        self.add_columns("ID", "Symbol", "Side", "PnL", "Reason")
        # No cell cursor — this table is display-only.
        self.cursor_type = "none"

    def update_trades(self, trades: list[dict]) -> None:
        """Replace the table contents with the most recent 10 trades."""
        self.clear()
        for t in trades[-10:]:
            pnl = t.get("pnl", 0) or 0
            # Render losses as "-$x.xx" rather than "$-x.xx".
            pnl_val = f"+${pnl:.2f}" if pnl >= 0 else f"-${abs(pnl):.2f}"
            self.add_row(
                str(t.get("id", "?")),
                t.get("symbol", "?"),
                t.get("side", "?").upper(),
                pnl_val,
                t.get("exit_reason", "?"),
            )


# ==== new files: tests/__init__.py (empty), tests/test_alerts.py ====
"""Tests for alert dispatch — Discord and Telegram (mocked)."""

from __future__ import annotations

from unittest.mock import MagicMock, patch

from perp_bot.infra.alerts import send_discord_alert, send_telegram_alert


class TestDiscordAlert:
    def test_returns_false_if_no_url(self):
        assert send_discord_alert("", "test") is False

    @patch("perp_bot.infra.alerts.urllib.request.urlopen")
    def test_sends_payload(self, mock_urlopen):
        # urlopen is used as a context manager, so __enter__/__exit__ are mocked.
        mock_resp = MagicMock()
        mock_resp.status = 204
        mock_resp.__enter__ = MagicMock(return_value=mock_resp)
        mock_resp.__exit__ = MagicMock(return_value=False)
        mock_urlopen.return_value = mock_resp

        result = send_discord_alert("https://example.com/webhook", "hello")
        assert result is True
        mock_urlopen.assert_called_once()


class TestTelegramAlert:
    def test_returns_false_if_no_token(self):
        assert send_telegram_alert("", "123", "test") is False

    def test_returns_false_if_no_chat_id(self):
        assert send_telegram_alert("bot123", "", "test") is False

    @patch("perp_bot.infra.alerts.urllib.request.urlopen")
    def test_sends_message(self, mock_urlopen):
        mock_resp = MagicMock()
        mock_resp.status = 200
        mock_resp.__enter__ = MagicMock(return_value=mock_resp)
        mock_resp.__exit__ = MagicMock(return_value=False)
        mock_urlopen.return_value = mock_resp

        result = send_telegram_alert("bot123", "456", "hello")
        assert result is True
        mock_urlopen.assert_called_once()
        # Verify URL contains the bot token and message params
        call_args = mock_urlopen.call_args
        req = call_args[0][0]
        assert "bot123" in req.full_url
        assert "456" in req.full_url


# ==== new file: tests/test_backtest_cost_model.py ====
"""Tests for backtest cost models — fees, slippage, and funding."""

from __future__ import annotations

import pytest

from perp_bot.backtest.cost_model import FeeModel, FundingModel, SlippageModel

# Milliseconds per hour — funding accrues on hourly boundaries.
_HOUR_MS = 3_600_000


# ── FeeModel ────────────────────────────────────────

class TestFeeModel:
    def test_entry_fee_uses_maker_rate(self):
        # Entries are limit-first, so they pay the maker rate.
        fm = FeeModel(maker_rate=0.00015, taker_rate=0.00045)
        assert fm.entry_fee(10_000) == pytest.approx(1.50)

    def test_exit_fee_profit_uses_maker(self):
        fm = FeeModel(maker_rate=0.00015, taker_rate=0.00045)
        assert fm.exit_fee(10_000, is_stop_loss=False) == pytest.approx(1.50)

    def test_exit_fee_stop_loss_uses_taker(self):
        # Stop-loss exits cross the spread, so they pay the taker rate.
        fm = FeeModel(maker_rate=0.00015, taker_rate=0.00045)
        assert fm.exit_fee(10_000, is_stop_loss=True) == pytest.approx(4.50)

    def test_zero_notional(self):
        fm = FeeModel()
        assert fm.entry_fee(0) == 0.0
        assert fm.exit_fee(0) == 0.0


# ── SlippageModel ───────────────────────────────────

class TestSlippageModel:
    # Slippage always moves the fill price against the trader:
    # entries fill worse, exits fill worse, for both sides.
    def test_long_entry_price_increases(self):
        sm = SlippageModel(min_pct=0.001, max_pct=0.001, seed=1)
        result = sm.apply(100.0, "long", is_entry=True)
        assert result > 100.0

    def test_long_exit_price_decreases(self):
        sm = SlippageModel(min_pct=0.001, max_pct=0.001, seed=1)
        result = sm.apply(100.0, "long", is_entry=False)
        assert result < 100.0

    def test_short_entry_price_decreases(self):
        sm = SlippageModel(min_pct=0.001, max_pct=0.001, seed=1)
        result = sm.apply(100.0, "short", is_entry=True)
        assert result < 100.0

    def test_short_exit_price_increases(self):
        sm = SlippageModel(min_pct=0.001, max_pct=0.001, seed=1)
        result = sm.apply(100.0, "short", is_entry=False)
        assert result > 100.0

    def test_reproducible_with_same_seed(self):
        sm1 = SlippageModel(seed=42)
        sm2 = SlippageModel(seed=42)
        assert sm1.apply(100.0, "long", True) == sm2.apply(100.0, "long", True)

    def test_different_seeds_produce_different_results(self):
        sm1 = SlippageModel(seed=1)
        sm2 = SlippageModel(seed=2)
        assert sm1.apply(100.0, "long", True) != sm2.apply(100.0, "long", True)

    def test_slippage_within_bounds(self):
        sm = SlippageModel(min_pct=0.0001, max_pct=0.0005, seed=42)
        for _ in range(100):
            result = sm.apply(1000.0, "long", is_entry=True)
            slip = (result - 1000.0) / 1000.0
            assert 0.0001 <= slip <= 0.0005


# ── FundingModel ────────────────────────────────────

class TestFundingModel:
    def _make_rates(self, hours: list[int], rate: float) -> list[dict]:
        """Build hourly funding-rate dicts at the given hour marks."""
        return [{"time": h * _HOUR_MS, "rate": rate} for h in hours]

    def test_no_funding_within_same_hour(self):
        rates = self._make_rates([0, 1, 2], 0.001)
        fm = FundingModel(rates)
        # Position opened and closed within the same hour
        cost = fm.cost_between("long", 10_000, 0, _HOUR_MS - 1)
        assert cost == 0.0

    def test_long_pays_positive_rate(self):
        rates = self._make_rates([0, 1, 2, 3], 0.001)
        fm = FundingModel(rates)
        cost = fm.cost_between("long", 10_000, 0, 3 * _HOUR_MS)
        # Should cross 3 hourly boundaries: 1h, 2h, 3h
        assert cost == pytest.approx(0.001 * 10_000 * 3)

    def test_short_receives_positive_rate(self):
        rates = self._make_rates([0, 1, 2, 3], 0.001)
        fm = FundingModel(rates)
        cost = fm.cost_between("short", 10_000, 0, 3 * _HOUR_MS)
        # Shorts receive funding when the rate is positive → negative cost.
        assert cost == pytest.approx(-0.001 * 10_000 * 3)

    def test_missing_rate_defaults_to_zero(self):
        # Only rate at hour 1
        fm = FundingModel([{"time": _HOUR_MS, "rate": 0.005}])
        cost = fm.cost_between("long", 10_000, 0, 3 * _HOUR_MS)
        # Only hour 1 has a rate
        assert cost == pytest.approx(0.005 * 10_000)

    def test_two_hour_hold(self):
        rates = self._make_rates(range(24), 0.0001)
        fm = FundingModel(rates)
        # Open at 0.5h, close at 2.5h → crosses hour 1 and hour 2
        cost = fm.cost_between("long", 10_000, _HOUR_MS // 2, 5 * _HOUR_MS // 2)
        assert cost == pytest.approx(0.0001 * 10_000 * 2)


# ==== new file: tests/test_backtest_engine.py ====
"""Integration tests for the backtest engine with synthetic data."""

from __future__ import annotations

import math

import numpy as np
import pytest

from perp_bot.backtest.config import BacktestConfig
from perp_bot.backtest.cost_model import FeeModel, FundingModel, SlippageModel
from perp_bot.backtest.engine import BacktestEngine
from perp_bot.backtest.executor import BacktestExecutor
from perp_bot.backtest.risk_adapter import BacktestRiskManager
from perp_bot.config import (
    BotConfig,
    DataConfig,
    ExecutionConfig,
    RiskConfig,
    SignalConfig,
    TradingConfig,
)
from perp_bot.data.db import Database
from perp_bot.signals.prediction import PredictionRegime


def _make_config() -> BotConfig:
    """Build a minimal in-memory BotConfig for backtest runs."""
    return BotConfig(
        trading=TradingConfig(
            symbols=["ETH"], leverage=3, capital_usd=670.0, margin_usage_limit=0.5,
        ),
        signals=SignalConfig(
            zscore_lookback=20, zscore_entry_threshold=2.0, zscore_exit_threshold=0.3,
            zscore_stop_threshold=3.0, bollinger_period=20, bollinger_std=2.0,
            rsi_period=14, rsi_overbought=70, rsi_oversold=30,
            adx_period=14, adx_threshold=25,
        ),
        risk=RiskConfig(
            max_loss_per_trade_pct=0.03, daily_loss_limit_pct=0.08,
            max_positions=1, cooldown_seconds=1800, position_timeout_hours=24,
        ),
        data=DataConfig(
            timeframes=["15m"], primary_timeframe="15m", history_days=90,
            db_path=":memory:",
        ),
        execution=ExecutionConfig(
            order_type="limit", taker_fallback_seconds=30, use_server_side_stop=True,
        ),
        mode="paper",
    )
max_positions=1, cooldown_seconds=1800, position_timeout_hours=24, + ), + data=DataConfig( + timeframes=["15m"], primary_timeframe="15m", history_days=90, + db_path=":memory:", + ), + execution=ExecutionConfig( + order_type="limit", taker_fallback_seconds=30, use_server_side_stop=True, + ), + mode="paper", + ) + + +def _make_bt_config(**overrides) -> BacktestConfig: + defaults = dict( + entry_delay_candles=0, + slippage_min_pct=0.0, + slippage_max_pct=0.0, + ) + defaults.update(overrides) + return BacktestConfig(**defaults) + + +def _generate_mean_reverting_candles( + n_candles: int = 200, + base_price: float = 2000.0, + amplitude: float = 100.0, + period: int = 40, + start_time_ms: int = 0, + interval_ms: int = 900_000, # 15m candles +) -> list[dict]: + """Generate synthetic candles with mean-reverting price action.""" + candles = [] + np.random.seed(42) + for i in range(n_candles): + # Sinusoidal mean-reversion pattern + price = base_price + amplitude * math.sin(2 * math.pi * i / period) + noise = np.random.normal(0, 2) + close = price + noise + high = close + abs(np.random.normal(0, 3)) + low = close - abs(np.random.normal(0, 3)) + open_price = close + np.random.normal(0, 1) + candles.append({ + "symbol": "ETH", + "timeframe": "15m", + "open_time": start_time_ms + i * interval_ms, + "open": open_price, + "high": max(high, open_price, close), + "low": min(low, open_price, close), + "close": close, + "volume": 1000.0 + abs(np.random.normal(0, 200)), + "num_trades": 100, + }) + return candles + + +class TestBacktestExecutor: + def test_open_and_close_long(self): + fm = FeeModel(0.00015, 0.00045) + sm = SlippageModel(0.0, 0.0, seed=1) # no slippage + funding = FundingModel([]) + executor = BacktestExecutor(fm, sm, funding) + + executor.open_position("ETH", "long", 1000.0, 2000.0, 0) + assert executor.has_position + + record = executor.close_position(2100.0, 3_600_000, "profit") + assert not executor.has_position + assert record.side == "long" + assert record.pnl > 0 
# price went up + assert record.entry_fee == pytest.approx(0.15) + assert record.exit_fee == pytest.approx(0.15) + assert record.net_pnl < record.pnl # fees reduce net + + def test_stop_loss_uses_taker_fee(self): + fm = FeeModel(0.00015, 0.00045) + sm = SlippageModel(0.0, 0.0, seed=1) + executor = BacktestExecutor(fm, sm, FundingModel([])) + + executor.open_position("ETH", "long", 1000.0, 2000.0, 0) + record = executor.close_position(1900.0, 3_600_000, "stop_loss", is_stop_loss=True) + assert record.exit_fee == pytest.approx(0.45) # taker rate + + def test_short_pnl_calculation(self): + fm = FeeModel(0.0, 0.0) # no fees for clarity + sm = SlippageModel(0.0, 0.0, seed=1) + executor = BacktestExecutor(fm, sm, FundingModel([])) + + executor.open_position("ETH", "short", 1000.0, 2000.0, 0) + record = executor.close_position(1800.0, 3_600_000, "profit") + # (2000-1800)/2000 * 1000 = 100 + assert record.pnl == pytest.approx(100.0) + + +class TestBacktestRiskManager: + def test_cooldown_blocks_entry(self): + config = _make_config() + risk = BacktestRiskManager(config) + # Record a stop-loss at time 0 + risk.record_trade_close(-20.0, 0, is_stop_loss=True) + # 1800s cooldown = 1_800_000ms + assert risk.check_entry(1_000_000) is False # within cooldown + assert risk.check_entry(1_800_001) is True # after cooldown + + def test_daily_loss_limit(self): + config = _make_config() + risk = BacktestRiskManager(config) + day_ms = 86_400_000 + # Max daily loss = 670 * 0.08 = $53.60 + risk.record_trade_close(-30.0, day_ms) + assert risk.check_entry(day_ms + 1000) is True + risk.record_trade_close(-30.0, day_ms + 2000) + # Total = -60 > -53.60 limit + assert risk.check_entry(day_ms + 3000) is False + # Next day resets + assert risk.check_entry(2 * day_ms + 1000) is True + + def test_position_timeout(self): + config = _make_config() + risk = BacktestRiskManager(config) + entry = 0 + # 24h = 86_400_000ms + assert risk.check_position_timeout(entry, 86_000_000) is False + assert 
risk.check_position_timeout(entry, 86_400_001) is True + + def test_crisis_blocks_sizing(self): + config = _make_config() + risk = BacktestRiskManager(config) + assert risk.compute_position_size(PredictionRegime.CRISIS) == 0.0 + assert risk.compute_position_size(PredictionRegime.NORMAL) > 0.0 + + +class TestBacktestEngine: + def test_runs_without_error(self): + """Integration test: engine runs on synthetic data and produces results.""" + config = _make_config() + bt_config = _make_bt_config() + db = Database(":memory:") + + candles = _generate_mean_reverting_candles() + db.insert_candles(candles) + + engine = BacktestEngine(config, bt_config) + result = engine.run(db, "ETH") + + assert result.equity_curve + assert result.metrics["total_trades"] >= 0 + db.close() + + def test_empty_data_returns_empty_result(self): + config = _make_config() + bt_config = _make_bt_config() + db = Database(":memory:") + + engine = BacktestEngine(config, bt_config) + result = engine.run(db, "ETH") + assert result.trades == [] + assert result.equity_curve == [] + db.close() + + def test_reproducible_with_same_seed(self): + """Two runs with same seed produce identical trades.""" + config = _make_config() + bt_config = _make_bt_config(seed=42) + db = Database(":memory:") + candles = _generate_mean_reverting_candles() + db.insert_candles(candles) + + engine1 = BacktestEngine(config, bt_config) + result1 = engine1.run(db, "ETH") + + engine2 = BacktestEngine(config, bt_config) + result2 = engine2.run(db, "ETH") + + assert len(result1.trades) == len(result2.trades) + for t1, t2 in zip(result1.trades, result2.trades): + assert t1.entry_price == t2.entry_price + assert t1.exit_price == t2.exit_price + assert t1.net_pnl == t2.net_pnl + db.close() + + def test_costs_are_applied(self): + """Verify that fees and slippage actually reduce net P&L.""" + config = _make_config() + # Run with zero costs + bt_zero = _make_bt_config( + maker_fee_rate=0.0, taker_fee_rate=0.0, + slippage_min_pct=0.0, 
slippage_max_pct=0.0, + ) + # Run with real costs + bt_real = _make_bt_config( + maker_fee_rate=0.00015, taker_fee_rate=0.00045, + slippage_min_pct=0.0001, slippage_max_pct=0.0005, + ) + + db = Database(":memory:") + candles = _generate_mean_reverting_candles() + db.insert_candles(candles) + + result_zero = BacktestEngine(config, bt_zero).run(db, "ETH") + result_real = BacktestEngine(config, bt_real).run(db, "ETH") + + # If there are trades, costs should reduce the total net pnl + if result_zero.trades and result_real.trades: + assert result_real.metrics["total_net_pnl"] <= result_zero.metrics["total_net_pnl"] + db.close() + + def test_entry_delay_can_cancel_order(self): + """With entry_delay=1 and cancel_if_signal_gone, some orders get cancelled.""" + config = _make_config() + bt_instant = _make_bt_config(entry_delay_candles=0) + bt_delayed = _make_bt_config(entry_delay_candles=1, cancel_if_signal_gone=True) + + db = Database(":memory:") + candles = _generate_mean_reverting_candles() + db.insert_candles(candles) + + result_instant = BacktestEngine(config, bt_instant).run(db, "ETH") + result_delayed = BacktestEngine(config, bt_delayed).run(db, "ETH") + + # Delayed execution with cancellation should have <= trades than instant + assert result_delayed.metrics["total_trades"] <= result_instant.metrics["total_trades"] + db.close() + + def test_force_close_at_end(self): + """Any open position is force-closed at backtest end.""" + config = _make_config() + bt_config = _make_bt_config() + db = Database(":memory:") + + # Use small dataset so we likely have an open position at end + candles = _generate_mean_reverting_candles(n_candles=60) + db.insert_candles(candles) + + engine = BacktestEngine(config, bt_config) + result = engine.run(db, "ETH") + + # All trades should be closed (have exit_reason) + for t in result.trades: + assert t.exit_reason is not None + db.close() diff --git a/tests/test_backtest_metrics.py b/tests/test_backtest_metrics.py new file mode 100644 index 
0000000..5a7bf07 --- /dev/null +++ b/tests/test_backtest_metrics.py @@ -0,0 +1,148 @@ +"""Tests for backtest performance metrics.""" + +from __future__ import annotations + +import pytest + +from perp_bot.backtest.metrics import ( + avg_holding_hours, + calmar_ratio, + compute_all, + expected_value, + max_drawdown, + profit_factor, + sharpe_ratio, + win_rate, +) +from perp_bot.backtest.results import TradeRecord + + +def _trade(net_pnl: float, entry_ms: int = 0, exit_ms: int = 3_600_000) -> TradeRecord: + """Helper to create a minimal TradeRecord for metric tests.""" + return TradeRecord( + id=1, symbol="ETH", side="long", + entry_time_ms=entry_ms, exit_time_ms=exit_ms, + entry_price=100.0, exit_price=101.0, + raw_entry_price=100.0, raw_exit_price=101.0, + size_usd=1000.0, pnl=net_pnl + 1.0, # approximate + entry_fee=0.5, exit_fee=0.5, funding_cost=0.0, + net_pnl=net_pnl, exit_reason="test", + ) + + +class TestWinRate: + def test_all_winners(self): + trades = [_trade(10), _trade(5), _trade(1)] + assert win_rate(trades) == pytest.approx(1.0) + + def test_all_losers(self): + trades = [_trade(-10), _trade(-5)] + assert win_rate(trades) == pytest.approx(0.0) + + def test_mixed(self): + trades = [_trade(10), _trade(-5), _trade(3)] + assert win_rate(trades) == pytest.approx(2 / 3) + + def test_empty(self): + assert win_rate([]) == 0.0 + + def test_zero_pnl_not_winner(self): + trades = [_trade(0.0)] + assert win_rate(trades) == 0.0 + + +class TestExpectedValue: + def test_positive(self): + trades = [_trade(10), _trade(20)] + assert expected_value(trades) == pytest.approx(15.0) + + def test_empty(self): + assert expected_value([]) == 0.0 + + +class TestProfitFactor: + def test_normal(self): + trades = [_trade(20), _trade(-10)] + assert profit_factor(trades) == pytest.approx(2.0) + + def test_no_losses(self): + trades = [_trade(10), _trade(5)] + assert profit_factor(trades) == float("inf") + + def test_no_wins_no_losses(self): + assert profit_factor([]) == 0.0 + + +class 
TestSharpeRatio: + def test_constant_equity(self): + curve = [100.0, 100.0, 100.0, 100.0] + assert sharpe_ratio(curve) == 0.0 + + def test_steadily_increasing(self): + # Daily 1% return → high Sharpe + curve = [100.0] + for _ in range(100): + curve.append(curve[-1] * 1.01) + s = sharpe_ratio(curve) + assert s > 5.0 # very high because no variance + + def test_too_short(self): + assert sharpe_ratio([100.0]) == 0.0 + assert sharpe_ratio([]) == 0.0 + + +class TestMaxDrawdown: + def test_no_drawdown(self): + curve = [100, 101, 102, 103] + pct, usd = max_drawdown(curve) + assert pct == 0.0 + assert usd == 0.0 + + def test_known_drawdown(self): + curve = [100, 110, 90, 95, 105] + pct, usd = max_drawdown(curve) + # Peak=110, trough=90 → dd=20/110 + assert usd == pytest.approx(20.0) + assert pct == pytest.approx(20 / 110) + + def test_empty(self): + pct, usd = max_drawdown([]) + assert pct == 0.0 + + +class TestCalmarRatio: + def test_normal(self): + # 20% annual return, 10% max dd, 1 year + assert calmar_ratio(0.20, 0.10, 1.0) == pytest.approx(2.0) + + def test_zero_drawdown(self): + assert calmar_ratio(0.20, 0.0, 1.0) == 0.0 + + +class TestAvgHoldingHours: + def test_known_holding(self): + t1 = _trade(10, entry_ms=0, exit_ms=2 * 3_600_000) # 2h + t2 = _trade(5, entry_ms=0, exit_ms=4 * 3_600_000) # 4h + assert avg_holding_hours([t1, t2]) == pytest.approx(3.0) + + def test_empty(self): + assert avg_holding_hours([]) == 0.0 + + +class TestComputeAll: + def test_returns_all_keys(self): + trades = [_trade(10), _trade(-5)] + curve = [100, 102, 98, 105] + result = compute_all(trades, curve, 100.0) + expected_keys = { + "total_trades", "win_rate", "expected_value", "profit_factor", + "sharpe_ratio", "max_drawdown_pct", "max_drawdown_usd", + "calmar_ratio", "avg_holding_hours", "total_net_pnl", + "total_fees", "total_funding", "total_slippage", "total_return_pct", + } + assert set(result.keys()) == expected_keys + + def test_total_net_pnl(self): + trades = [_trade(10), 
_trade(-3)] + result = compute_all(trades, [100, 107], 100.0) + assert result["total_net_pnl"] == pytest.approx(7.0) diff --git a/tests/test_indicators.py b/tests/test_indicators.py new file mode 100644 index 0000000..3699d20 --- /dev/null +++ b/tests/test_indicators.py @@ -0,0 +1,127 @@ +"""Tests for self-implemented technical indicators.""" + +import numpy as np +import pandas as pd + +from perp_bot.signals.indicators import adx, bollinger_bands, hurst_exponent, rsi, zscore + + +def _random_prices(n: int = 100, seed: int = 42) -> pd.Series: + rng = np.random.default_rng(seed) + returns = rng.normal(0, 0.02, n) + prices = 100 * np.exp(np.cumsum(returns)) + return pd.Series(prices) + + +class TestZScore: + def test_mean_returns_zero(self): + """A constant series should have Z-score of 0.""" + series = pd.Series([100.0] * 30) + z = zscore(series, 20) + # Z-score is NaN where std=0 (constant series) + assert z.iloc[-1] != z.iloc[-1] or z.iloc[-1] == 0 # NaN or 0 + + def test_above_mean_is_positive(self): + series = pd.Series(list(range(50, 70)) + [100]) + z = zscore(series, 20) + assert z.iloc[-1] > 0 + + def test_below_mean_is_negative(self): + series = pd.Series(list(range(50, 70)) + [30]) + z = zscore(series, 20) + assert z.iloc[-1] < 0 + + +class TestBollingerBands: + def test_upper_above_lower(self): + series = _random_prices() + mid, upper, lower = bollinger_bands(series, 20, 2.0) + valid = mid.dropna().index + assert (upper[valid] >= mid[valid]).all() + assert (mid[valid] >= lower[valid]).all() + + def test_band_width_scales_with_std(self): + series = _random_prices() + _, upper1, lower1 = bollinger_bands(series, 20, 1.0) + _, upper2, lower2 = bollinger_bands(series, 20, 2.0) + valid = upper1.dropna().index + width1 = (upper1 - lower1)[valid] + width2 = (upper2 - lower2)[valid] + assert (width2 > width1).all() + + +class TestRSI: + def test_uptrend_rsi_high(self): + """Steadily rising prices should give RSI > 50.""" + series = pd.Series(np.linspace(100, 150, 
50)) + r = rsi(series, 14) + assert r.iloc[-1] > 70 + + def test_downtrend_rsi_low(self): + series = pd.Series(np.linspace(150, 100, 50)) + r = rsi(series, 14) + assert r.iloc[-1] < 30 + + def test_rsi_bounded(self): + series = _random_prices(200) + r = rsi(series, 14) + valid = r.dropna() + assert (valid >= 0).all() + assert (valid <= 100).all() + + +class TestHurstExponent: + def test_random_walk_near_half(self): + """A random walk should have Hurst exponent close to 0.5.""" + rng = np.random.default_rng(42) + prices = 100 * np.exp(np.cumsum(rng.normal(0, 0.01, 500))) + h = hurst_exponent(pd.Series(prices)) + assert 0.35 < h < 0.65 + + def test_mean_reverting_below_half(self): + """A strongly mean-reverting series should have H < 0.5.""" + rng = np.random.default_rng(99) + # Generate mean-reverting prices (Ornstein-Uhlenbeck-like) + n = 500 + prices = np.zeros(n) + prices[0] = 100 + for i in range(1, n): + prices[i] = prices[i - 1] + 0.3 * (100 - prices[i - 1]) + rng.normal(0, 0.5) + h = hurst_exponent(pd.Series(prices)) + assert h < 0.5 + + def test_trending_above_half(self): + """A trending series should have H > 0.5.""" + prices = pd.Series(np.linspace(100, 200, 500)) + h = hurst_exponent(prices) + assert h > 0.5 + + def test_short_series_returns_default(self): + """Too few data points should return 0.5 (random walk assumption).""" + h = hurst_exponent(pd.Series([100, 101, 102])) + assert h == 0.5 + + +class TestADX: + def test_trending_market_high_adx(self): + """Strong trend should produce ADX > 25.""" + n = 100 + high = pd.Series(np.linspace(100, 200, n)) + 1 + low = pd.Series(np.linspace(100, 200, n)) - 1 + close = pd.Series(np.linspace(100, 200, n)) + a = adx(high, low, close, 14) + assert a.iloc[-1] > 25 + + def test_range_bound_low_adx(self): + """Choppy sideways market should produce lower ADX.""" + rng = np.random.default_rng(123) + n = 200 + base = 100 + rng.normal(0, 0.5, n).cumsum() + # Mean-revert aggressively + base = 100 + (base - base.mean()) * 
0.1 + high = pd.Series(base + rng.uniform(0.5, 1.5, n)) + low = pd.Series(base - rng.uniform(0.5, 1.5, n)) + close = pd.Series(base) + a = adx(high, low, close, 14) + # Range-bound ADX should be lower than trending + assert a.iloc[-1] < 30 diff --git a/tests/test_ipc.py b/tests/test_ipc.py new file mode 100644 index 0000000..2cea213 --- /dev/null +++ b/tests/test_ipc.py @@ -0,0 +1,196 @@ +"""Tests for the IPC layer — protocol, state, server, client.""" + +from __future__ import annotations + +import os +import tempfile +import time + +import pytest + +from perp_bot.ipc.client import DaemonClient +from perp_bot.ipc.protocol import ( + SOCKET_NAME, + get_socket_path, +) +from perp_bot.ipc.server import DaemonStateServer +from perp_bot.ipc.state import DaemonState + +# ── Protocol ────────────────────────────────────────── + + +class TestProtocol: + def test_socket_path_derives_from_db(self, tmp_path): + db_path = tmp_path / "data" / "bot.db" + db_path.parent.mkdir(parents=True) + result = get_socket_path(str(db_path)) + assert result == db_path.parent / SOCKET_NAME + + def test_socket_name_constant(self): + assert SOCKET_NAME == "perp-bot.sock" + + +# ── DaemonState ─────────────────────────────────────── + + +class TestDaemonState: + def test_default_state(self): + state = DaemonState() + assert state.mode == "paper" + assert state.paused is False + assert state.tick_count == 0 + assert state.ws_healthy is False + + def test_snapshot_returns_dict(self): + state = DaemonState(mode="live", tick_count=42) + snap = state.snapshot() + assert isinstance(snap, dict) + assert snap["mode"] == "live" + assert snap["tick_count"] == 42 + assert "uptime_seconds" in snap + + def test_snapshot_is_deep_copy(self): + state = DaemonState() + state.mid_prices["ETH"] = 3200.0 + snap = state.snapshot() + state.mid_prices["ETH"] = 9999.0 + assert snap["mid_prices"]["ETH"] == 3200.0 + + def test_update_multiple_fields(self): + state = DaemonState() + state.update(mode="live", paused=True, 
tick_count=10) + assert state.mode == "live" + assert state.paused is True + assert state.tick_count == 10 + + def test_update_ignores_private_fields(self): + state = DaemonState() + state.update(_lock="hacked") + import threading + assert isinstance(state._lock, threading.Lock) + + +# ── Server + Client round-trip ──────────────────────── + + +@pytest.fixture +def short_sock_path(): + """Create a short socket path that fits in AF_UNIX's 104-byte limit.""" + # /tmp/pb-XXXXXXXX/pb.sock is well under the limit + tmpdir = tempfile.mkdtemp(prefix="pb-", dir="/tmp") + from pathlib import Path + sock = Path(tmpdir) / "pb.sock" + yield sock + # Cleanup + if sock.exists(): + sock.unlink() + os.rmdir(tmpdir) + + +@pytest.fixture +def ipc_pair(short_sock_path): + """Start server + client pair for testing, auto-cleanup.""" + state = DaemonState(mode="paper") + server = DaemonStateServer(short_sock_path, state) + server.start() + client = DaemonClient(short_sock_path) + time.sleep(0.1) + yield state, server, client + server.stop() + + +class TestServerClient: + def test_get_state(self, ipc_pair): + state, _server, client = ipc_pair + state.update(tick_count=5, ws_healthy=True) + result = client.get_state() + assert result is not None + assert result["tick_count"] == 5 + assert result["ws_healthy"] is True + + def test_pause_resume(self, ipc_pair): + state, _server, client = ipc_pair + assert state.paused is False + + resp = client.pause() + assert resp["ok"] is True + assert state.paused is True + + resp = client.resume() + assert resp["ok"] is True + assert state.paused is False + + def test_is_running(self, ipc_pair): + _state, _server, client = ipc_pair + assert client.is_running() is True + + def test_is_running_no_server(self, tmp_path): + sock_path = tmp_path / "nonexistent.sock" + client = DaemonClient(sock_path) + assert client.is_running() is False + + def test_unknown_command(self, ipc_pair): + _state, _server, client = ipc_pair + result = client._request({"cmd": 
"bogus"}) + assert result is not None + assert result["ok"] is False + assert "unknown_command" in result.get("error", "") + + def test_emergency_close_no_executor(self, ipc_pair): + _state, _server, client = ipc_pair + result = client.emergency_close("ETH") + assert result is not None + assert result["ok"] is False + assert "executor" in result.get("error", "") + + def test_emergency_close_no_open_trades(self, ipc_pair): + _state, server, client = ipc_pair + from unittest.mock import MagicMock + mock_db = MagicMock() + mock_db.get_open_trades.return_value = [] + server._db = mock_db + server._executor = MagicMock() + + result = client.emergency_close("ETH") + assert result["ok"] is True + assert "no_open_trades" in result.get("message", "") + + def test_server_cleans_stale_socket(self, short_sock_path): + short_sock_path.touch() + state = DaemonState() + server = DaemonStateServer(short_sock_path, state) + server.start() + time.sleep(0.1) + + client = DaemonClient(short_sock_path) + assert client.is_running() is True + server.stop() + + def test_multiple_sequential_requests(self, ipc_pair): + state, _server, client = ipc_pair + for i in range(5): + state.update(tick_count=i) + result = client.get_state() + assert result["tick_count"] == i + + def test_client_handles_dead_server(self, short_sock_path): + state = DaemonState() + server = DaemonStateServer(short_sock_path, state) + server.start() + time.sleep(0.1) + client = DaemonClient(short_sock_path) + + server.stop() + time.sleep(0.1) + + result = client.get_state() + assert result is None + + def test_server_stop_removes_socket(self, short_sock_path): + state = DaemonState() + server = DaemonStateServer(short_sock_path, state) + server.start() + time.sleep(0.1) + assert short_sock_path.exists() + server.stop() + assert not short_sock_path.exists() diff --git a/tests/test_live_executor.py b/tests/test_live_executor.py new file mode 100644 index 0000000..affa5e6 --- /dev/null +++ b/tests/test_live_executor.py @@ 
-0,0 +1,375 @@ +"""Tests for the LiveExecutor — unit tests using mocked SDK.""" + +from __future__ import annotations + +from unittest.mock import MagicMock, patch + +from perp_bot.config import ( + BotConfig, + DataConfig, + ExecutionConfig, + RiskConfig, + SignalConfig, + TradingConfig, +) +from perp_bot.data.db import Database +from perp_bot.execution.live_executor import ( + LiveExecutor, + _round_price, + _round_size, +) + + +def _make_config() -> BotConfig: + return BotConfig( + trading=TradingConfig( + symbols=["ETH"], leverage=3, + capital_usd=670.0, margin_usage_limit=0.5, + ), + signals=SignalConfig( + zscore_lookback=20, zscore_entry_threshold=2.0, + zscore_exit_threshold=0.3, zscore_stop_threshold=3.0, + bollinger_period=20, bollinger_std=2.0, + rsi_period=14, rsi_overbought=70, rsi_oversold=30, + adx_period=14, adx_threshold=25, + ), + risk=RiskConfig( + max_loss_per_trade_pct=0.03, daily_loss_limit_pct=0.08, + max_positions=1, cooldown_seconds=1800, + position_timeout_hours=24, + ), + data=DataConfig( + timeframes=["15m"], primary_timeframe="15m", + history_days=90, db_path=":memory:", + ), + execution=ExecutionConfig( + order_type="limit", taker_fallback_seconds=30, + use_server_side_stop=True, + ), + mode="live", + hl_private_key="0x" + "ab" * 32, + hl_wallet_address="0x" + "cd" * 20, + ) + + +class TestRoundPrice: + def test_round_to_5_sig_figs(self): + assert _round_price(2543.67891) == 2543.7 + assert _round_price(0.00012345) == 0.00012345 + + def test_zero(self): + assert _round_price(0) == 0.0 + + +class TestRoundSize: + def test_round_to_decimals(self): + assert _round_size(0.12345, 3) == 0.123 + assert _round_size(0.12345, 2) == 0.12 + assert _round_size(1.5, 0) == 2.0 + + +class TestLiveExecutorInit: + @patch("perp_bot.execution.live_executor.Exchange") + @patch("perp_bot.execution.live_executor.eth_account.Account.from_key") + def test_init_creates_exchange(self, mock_from_key, mock_exchange): + mock_wallet = MagicMock() + 
mock_wallet.address = "0x" + "ab" * 20 + mock_from_key.return_value = mock_wallet + + mock_ex = MagicMock() + mock_info = MagicMock() + mock_info.meta_and_asset_ctxs.return_value = [ + {"universe": [{"name": "ETH", "szDecimals": 4}]} + ] + mock_ex.info = mock_info + mock_exchange.return_value = mock_ex + + config = _make_config() + db = Database(":memory:") + + executor = LiveExecutor(config, db) + assert executor._sz_decimals.get("ETH") == 4 + mock_exchange.assert_called_once() + db.close() + + +class TestLiveExecutorOpenPosition: + @patch("perp_bot.execution.live_executor.Exchange") + @patch("perp_bot.execution.live_executor.eth_account.Account.from_key") + def test_open_position_calls_bulk_orders( + self, mock_from_key, mock_exchange + ): + mock_wallet = MagicMock() + mock_wallet.address = "0x" + "ab" * 20 + mock_from_key.return_value = mock_wallet + + mock_ex = MagicMock() + mock_info = MagicMock() + mock_info.meta_and_asset_ctxs.return_value = [ + {"universe": [{"name": "ETH", "szDecimals": 3}]} + ] + mock_ex.info = mock_info + + # Simulate a filled order response + mock_ex.bulk_orders.return_value = { + "response": { + "data": { + "statuses": [ + {"filled": {"oid": 123, "avgPx": "2500.0"}}, + {"resting": {"oid": 456}}, + ] + } + } + } + mock_exchange.return_value = mock_ex + + config = _make_config() + db = Database(":memory:") + executor = LiveExecutor(config, db) + + trade_id = executor.open_position("ETH", "long", 1005.0, 2500.0) + + # Should have placed bulk_orders with entry + SL + mock_ex.bulk_orders.assert_called_once() + orders = mock_ex.bulk_orders.call_args[0][0] + assert len(orders) == 2 + assert orders[0]["is_buy"] is True + assert orders[1]["order_type"]["trigger"]["tpsl"] == "sl" + + # Trade should be recorded in DB + assert trade_id is not None + trades = db.get_open_trades("ETH") + assert len(trades) == 1 + assert trades[0]["side"] == "long" + db.close() + + +class TestLiveExecutorClosePosition: + 
@patch("perp_bot.execution.live_executor.Exchange") + @patch("perp_bot.execution.live_executor.eth_account.Account.from_key") + def test_close_position_calls_market_close( + self, mock_from_key, mock_exchange + ): + mock_wallet = MagicMock() + mock_wallet.address = "0x" + "ab" * 20 + mock_from_key.return_value = mock_wallet + + mock_ex = MagicMock() + mock_info = MagicMock() + mock_info.meta_and_asset_ctxs.return_value = [{"universe": []}] + mock_info.frontend_open_orders.return_value = [] + mock_ex.info = mock_info + mock_ex.market_close.return_value = { + "response": { + "data": { + "statuses": [{"filled": {"avgPx": "2600.0"}}] + } + } + } + mock_exchange.return_value = mock_ex + + config = _make_config() + db = Database(":memory:") + executor = LiveExecutor(config, db) + + # Insert a trade to close + trade_id = db.insert_trade({ + "symbol": "ETH", "side": "long", "entry_time": 1000000, + "entry_price": 2500.0, "size_usd": 1005.0, "is_paper": 0, + }) + + result = executor.close_position( + trade_id, "ETH", 2600.0, 40.0, "mean_reversion_complete" + ) + + assert result is True + mock_ex.market_close.assert_called_once() + # Verify trigger orders were checked for cancellation + mock_info.frontend_open_orders.assert_called_once() + db.close() + + +class TestFillPriceExtraction: + @patch("perp_bot.execution.live_executor.Exchange") + @patch("perp_bot.execution.live_executor.eth_account.Account.from_key") + def test_get_fill_price_uses_vwap_from_fills( + self, mock_from_key, mock_exchange + ): + mock_wallet = MagicMock() + mock_wallet.address = "0x" + "ab" * 20 + mock_from_key.return_value = mock_wallet + + mock_ex = MagicMock() + mock_info = MagicMock() + mock_info.meta_and_asset_ctxs.return_value = [{"universe": []}] + # Simulate two partial fills at different prices + mock_info.user_fills_by_time.return_value = [ + {"oid": 100, "coin": "ETH", "px": "2500.0", "sz": "0.3"}, + {"oid": 100, "coin": "ETH", "px": "2502.0", "sz": "0.2"}, + {"oid": 999, "coin": "ETH", "px": 
"9999.0", "sz": "1.0"}, # different oid + ] + mock_ex.info = mock_info + mock_exchange.return_value = mock_ex + + config = _make_config() + db = Database(":memory:") + executor = LiveExecutor(config, db) + + price = executor._get_fill_price("ETH", 100) + # VWAP = (2500*0.3 + 2502*0.2) / 0.5 = (750 + 500.4) / 0.5 = 2500.8 + assert price is not None + assert abs(price - 2500.8) < 0.01 + db.close() + + +class TestSlPlacementEscalation: + @patch("perp_bot.execution.live_executor.Exchange") + @patch("perp_bot.execution.live_executor.eth_account.Account.from_key") + def test_sl_retries_once_on_failure(self, mock_from_key, mock_exchange): + mock_wallet = MagicMock() + mock_wallet.address = "0x" + "ab" * 20 + mock_from_key.return_value = mock_wallet + + mock_ex = MagicMock() + mock_info = MagicMock() + mock_info.meta_and_asset_ctxs.return_value = [{"universe": []}] + mock_ex.info = mock_info + # First call fails, second succeeds + mock_ex.order.side_effect = [Exception("API error"), MagicMock()] + mock_exchange.return_value = mock_ex + + config = _make_config() + db = Database(":memory:") + executor = LiveExecutor(config, db) + + result = executor._place_server_side_sl("ETH", True, 0.5, 2400.0) + assert result is True + assert mock_ex.order.call_count == 2 + db.close() + + @patch("perp_bot.execution.live_executor.Exchange") + @patch("perp_bot.execution.live_executor.eth_account.Account.from_key") + def test_sl_returns_false_after_two_failures(self, mock_from_key, mock_exchange): + mock_wallet = MagicMock() + mock_wallet.address = "0x" + "ab" * 20 + mock_from_key.return_value = mock_wallet + + mock_ex = MagicMock() + mock_info = MagicMock() + mock_info.meta_and_asset_ctxs.return_value = [{"universe": []}] + mock_ex.info = mock_info + mock_ex.order.side_effect = Exception("API down") + mock_exchange.return_value = mock_ex + + config = _make_config() + db = Database(":memory:") + executor = LiveExecutor(config, db) + + result = executor._place_server_side_sl("ETH", True, 0.5, 
2400.0) + assert result is False + assert mock_ex.order.call_count == 2 + db.close() + + +class TestLeverageValidation: + @patch("perp_bot.execution.live_executor.Exchange") + @patch("perp_bot.execution.live_executor.eth_account.Account.from_key") + def test_set_leverage_returns_true_on_success(self, mock_from_key, mock_exchange): + mock_wallet = MagicMock() + mock_wallet.address = "0x" + "ab" * 20 + mock_from_key.return_value = mock_wallet + + mock_ex = MagicMock() + mock_info = MagicMock() + mock_info.meta_and_asset_ctxs.return_value = [{"universe": []}] + mock_ex.info = mock_info + mock_exchange.return_value = mock_ex + + config = _make_config() + db = Database(":memory:") + executor = LiveExecutor(config, db) + assert executor.set_leverage("ETH", 3) is True + db.close() + + @patch("perp_bot.execution.live_executor.Exchange") + @patch("perp_bot.execution.live_executor.eth_account.Account.from_key") + def test_set_leverage_returns_false_on_failure(self, mock_from_key, mock_exchange): + mock_wallet = MagicMock() + mock_wallet.address = "0x" + "ab" * 20 + mock_from_key.return_value = mock_wallet + + mock_ex = MagicMock() + mock_info = MagicMock() + mock_info.meta_and_asset_ctxs.return_value = [{"universe": []}] + mock_ex.info = mock_info + mock_ex.update_leverage.side_effect = Exception("nope") + mock_exchange.return_value = mock_ex + + config = _make_config() + db = Database(":memory:") + executor = LiveExecutor(config, db) + assert executor.set_leverage("ETH", 3) is False + db.close() + + +class TestLiveExecutorPositionQuery: + @patch("perp_bot.execution.live_executor.Exchange") + @patch("perp_bot.execution.live_executor.eth_account.Account.from_key") + def test_get_exchange_position_returns_position( + self, mock_from_key, mock_exchange + ): + mock_wallet = MagicMock() + mock_wallet.address = "0xtest" + mock_from_key.return_value = mock_wallet + + mock_ex = MagicMock() + mock_info = MagicMock() + mock_info.meta_and_asset_ctxs.return_value = [{"universe": []}] + 
mock_info.user_state.return_value = { + "assetPositions": [ + { + "position": { + "coin": "ETH", + "szi": "0.5", + "entryPx": "2500.0", + "unrealizedPnl": "25.0", + } + } + ] + } + mock_ex.info = mock_info + mock_exchange.return_value = mock_ex + + config = _make_config() + db = Database(":memory:") + executor = LiveExecutor(config, db) + pos = executor.get_exchange_position("ETH") + + assert pos is not None + assert pos["side"] == "long" + assert pos["size_base"] == 0.5 + assert pos["entry_price"] == 2500.0 + db.close() + + @patch("perp_bot.execution.live_executor.Exchange") + @patch("perp_bot.execution.live_executor.eth_account.Account.from_key") + def test_get_exchange_position_returns_none_if_empty( + self, mock_from_key, mock_exchange + ): + mock_wallet = MagicMock() + mock_wallet.address = "0xtest" + mock_from_key.return_value = mock_wallet + + mock_ex = MagicMock() + mock_info = MagicMock() + mock_info.meta_and_asset_ctxs.return_value = [{"universe": []}] + mock_info.user_state.return_value = {"assetPositions": []} + mock_ex.info = mock_info + mock_exchange.return_value = mock_ex + + config = _make_config() + db = Database(":memory:") + executor = LiveExecutor(config, db) + pos = executor.get_exchange_position("ETH") + assert pos is None + db.close() diff --git a/tests/test_prediction.py b/tests/test_prediction.py new file mode 100644 index 0000000..8ffa5f3 --- /dev/null +++ b/tests/test_prediction.py @@ -0,0 +1,416 @@ +"""Tests for prediction market scoring and regime integration.""" + +import numpy as np +import pandas as pd + +from perp_bot.config import ( + BotConfig, + DataConfig, + ExecutionConfig, + PredictionConfig, + PredictionMarketDef, + RiskConfig, + SignalConfig, + TradingConfig, +) +from perp_bot.data.db import Database +from perp_bot.risk.manager import RiskManager +from perp_bot.signals.engine import Signal, SignalEngine +from perp_bot.signals.prediction import ( + PredictionRegime, + compute_regime, + funding_side_preference, + 
rate_change_score, + war_risk_score, +) + + +def _pred_config(**overrides) -> PredictionConfig: + defaults = { + "enabled": True, + "poll_interval_minutes": 15, + "war_risk_threshold": 0.4, + "war_risk_crisis_threshold": 0.7, + "rate_change_threshold": 0.3, + "position_size_reduction": 0.5, + "markets": [], + } + defaults.update(overrides) + return PredictionConfig(**defaults) + + +def _bot_config(prediction=None) -> BotConfig: + return BotConfig( + trading=TradingConfig( + symbols=["ETH"], leverage=3, + capital_usd=670.0, margin_usage_limit=0.5, + ), + signals=SignalConfig( + zscore_lookback=20, zscore_entry_threshold=2.0, + zscore_exit_threshold=0.3, zscore_stop_threshold=3.0, + bollinger_period=20, bollinger_std=2.0, + rsi_period=14, rsi_overbought=70, rsi_oversold=30, + adx_period=14, adx_threshold=25, + ), + risk=RiskConfig( + max_loss_per_trade_pct=0.03, daily_loss_limit_pct=0.08, + max_positions=1, cooldown_seconds=1800, + position_timeout_hours=24, + ), + data=DataConfig( + timeframes=["15m"], primary_timeframe="15m", + history_days=90, db_path=":memory:", + ), + execution=ExecutionConfig( + order_type="limit", taker_fallback_seconds=30, + use_server_side_stop=True, + ), + mode="paper", + prediction=prediction, + ) + + +def _signal_config() -> SignalConfig: + return SignalConfig( + zscore_lookback=20, zscore_entry_threshold=2.0, zscore_exit_threshold=0.3, + zscore_stop_threshold=3.0, bollinger_period=20, bollinger_std=2.0, + rsi_period=14, rsi_overbought=70, rsi_oversold=30, adx_period=14, adx_threshold=25, + ) + + +def _make_candles(close_prices: list[float], n_warmup: int = 50) -> pd.DataFrame: + rng = np.random.default_rng(42) + warmup = [100 + rng.normal(0, 1) for _ in range(n_warmup)] + all_close = warmup + close_prices + return pd.DataFrame({ + "open": all_close, + "high": [c + 0.5 for c in all_close], + "low": [c - 0.5 for c in all_close], + "close": all_close, + "volume": [1000.0] * len(all_close), + }) + + +# === war_risk_score tests === + +class 
TestWarRiskScore: + def test_single_market(self): + snapshots = [{"market_slug": "iran", "probability": 0.6}] + weights = {"iran": 1.0} + assert war_risk_score(snapshots, weights) == 0.6 + + def test_weighted_average(self): + snapshots = [ + {"market_slug": "iran", "probability": 0.8}, + {"market_slug": "ukraine", "probability": 0.2}, + ] + weights = {"iran": 0.6, "ukraine": 0.4} + expected = (0.8 * 0.6 + 0.2 * 0.4) / (0.6 + 0.4) + assert abs(war_risk_score(snapshots, weights) - expected) < 1e-10 + + def test_empty_snapshots(self): + assert war_risk_score([], {"iran": 1.0}) == 0.0 + + def test_unknown_slugs_ignored(self): + snapshots = [{"market_slug": "unknown", "probability": 0.9}] + weights = {"iran": 1.0} + assert war_risk_score(snapshots, weights) == 0.0 + + +# === rate_change_score tests === + +class TestRateChangeScore: + def test_rate_hike(self): + snapshots = [{"probability": 0.8}] + score = rate_change_score(snapshots) + assert score > 0 # hike expectation → positive + + def test_rate_cut(self): + snapshots = [{"probability": 0.2}] + score = rate_change_score(snapshots) + assert score < 0 # cut expectation → negative + + def test_neutral(self): + snapshots = [{"probability": 0.5}] + assert rate_change_score(snapshots) == 0.0 + + def test_empty(self): + assert rate_change_score([]) == 0.0 + + def test_range(self): + # Full hike certainty + assert rate_change_score([{"probability": 1.0}]) == 1.0 + # Full cut certainty + assert rate_change_score([{"probability": 0.0}]) == -1.0 + + +# === compute_regime tests === + +class TestComputeRegime: + def test_normal(self): + config = _pred_config() + assert compute_regime(0.2, 0.1, config) == PredictionRegime.NORMAL + + def test_high_risk(self): + config = _pred_config() + assert compute_regime(0.5, 0.0, config) == PredictionRegime.HIGH_RISK + + def test_crisis(self): + config = _pred_config() + assert compute_regime(0.8, 0.0, config) == PredictionRegime.CRISIS + + def test_crisis_trumps_rate(self): + """CRISIS takes 
priority over rate-based regimes.""" + config = _pred_config() + assert compute_regime(0.8, -0.5, config) == PredictionRegime.CRISIS + + def test_dovish(self): + config = _pred_config() + assert compute_regime(0.1, -0.5, config) == PredictionRegime.DOVISH_SHIFT + + def test_hawkish(self): + config = _pred_config() + assert compute_regime(0.1, 0.5, config) == PredictionRegime.HAWKISH_SHIFT + + def test_high_risk_trumps_rate(self): + """HIGH_RISK takes priority over DOVISH/HAWKISH.""" + config = _pred_config() + assert compute_regime(0.5, -0.5, config) == PredictionRegime.HIGH_RISK + + +# === funding_side_preference tests === + +class TestFundingSidePreference: + def test_dovish_prefers_long(self): + assert funding_side_preference(-0.5, 0.3) == "long" + + def test_hawkish_prefers_short(self): + assert funding_side_preference(0.5, 0.3) == "short" + + def test_neutral_no_preference(self): + assert funding_side_preference(0.1, 0.3) is None + + +# === Signal engine with regime === + +class TestSignalEngineRegime: + def test_crisis_blocks_entry(self): + engine = SignalEngine(_signal_config()) + df = _make_candles([100.0] * 30) + df = engine.compute_indicators(df) + # Force conditions that would normally trigger short entry + df.loc[df.index[-1], "zscore"] = 2.5 + df.loc[df.index[-1], "rsi"] = 75 + df.loc[df.index[-1], "bb_upper"] = 99.0 + df.loc[df.index[-1], "adx"] = 15 + result = engine.evaluate(df, position_side=None, prediction_regime=PredictionRegime.CRISIS) + assert result.signal == Signal.NONE + assert "crisis" in result.reason + + def test_crisis_allows_exit(self): + engine = SignalEngine(_signal_config()) + df = _make_candles([100.0] * 30) + df = engine.compute_indicators(df) + df.loc[df.index[-1], "zscore"] = 0.1 + result = engine.evaluate( + df, position_side="long", + prediction_regime=PredictionRegime.CRISIS, + ) + assert result.signal == Signal.CLOSE + + def test_high_risk_tightens_entry(self): + """Z-score of 2.1 should be enough for NORMAL but not 
HIGH_RISK.""" + engine = SignalEngine(_signal_config()) + df = _make_candles([100.0] * 30) + df = engine.compute_indicators(df) + df.loc[df.index[-1], "zscore"] = 2.1 + df.loc[df.index[-1], "rsi"] = 75 + df.loc[df.index[-1], "bb_upper"] = 99.0 + df.loc[df.index[-1], "adx"] = 15 + + # NORMAL: should trigger + normal_result = engine.evaluate( + df, position_side=None, + prediction_regime=PredictionRegime.NORMAL, + ) + assert normal_result.signal == Signal.SHORT + + # HIGH_RISK: z=2.1 < 2.5 threshold, should NOT trigger + high_risk_result = engine.evaluate( + df, position_side=None, + prediction_regime=PredictionRegime.HIGH_RISK, + ) + assert high_risk_result.signal == Signal.NONE + + def test_high_risk_earlier_exit(self): + """Z-score of 0.4 should hold in NORMAL but exit in HIGH_RISK.""" + engine = SignalEngine(_signal_config()) + df = _make_candles([100.0] * 30) + df = engine.compute_indicators(df) + df.loc[df.index[-1], "zscore"] = 0.4 + + # NORMAL: z=0.4 > 0.3 threshold, should NOT close + normal_result = engine.evaluate( + df, position_side="long", + prediction_regime=PredictionRegime.NORMAL, + ) + assert normal_result.signal == Signal.NONE + + # HIGH_RISK: z=0.4 < 0.5 threshold, should close + high_risk_result = engine.evaluate( + df, position_side="long", + prediction_regime=PredictionRegime.HIGH_RISK, + ) + assert high_risk_result.signal == Signal.CLOSE + + def test_prediction_regime_in_result(self): + engine = SignalEngine(_signal_config()) + df = _make_candles([100.0] * 30) + df = engine.compute_indicators(df) + result = engine.evaluate(df, prediction_regime=PredictionRegime.HIGH_RISK) + assert result.prediction_regime == PredictionRegime.HIGH_RISK + + def test_normal_regime_backward_compatible(self): + """Calling evaluate without regime args should behave identically to before.""" + engine = SignalEngine(_signal_config()) + df = _make_candles([100.0] * 30) + df = engine.compute_indicators(df) + result = engine.evaluate(df) + assert result.prediction_regime 
== PredictionRegime.NORMAL + + +# === Risk manager with regime === + +class TestRiskManagerRegime: + def test_normal_size_unchanged(self): + config = _bot_config() + db = Database(":memory:") + rm = RiskManager(config, db) + assert rm.compute_position_size(PredictionRegime.NORMAL) == 1005.0 + + def test_high_risk_reduces_size(self): + pred = _pred_config(position_size_reduction=0.5) + config = _bot_config(prediction=pred) + db = Database(":memory:") + rm = RiskManager(config, db) + size = rm.compute_position_size(PredictionRegime.HIGH_RISK) + assert size == 1005.0 * 0.5 + + def test_crisis_zero_size(self): + config = _bot_config() + db = Database(":memory:") + rm = RiskManager(config, db) + assert rm.compute_position_size(PredictionRegime.CRISIS) == 0.0 + + def test_no_prediction_config_normal_size(self): + """Without prediction config, HIGH_RISK doesn't reduce size.""" + config = _bot_config(prediction=None) + db = Database(":memory:") + rm = RiskManager(config, db) + # HIGH_RISK without prediction config → no reduction applied + size = rm.compute_position_size(PredictionRegime.HIGH_RISK) + assert size == 1005.0 + + def test_backward_compatible_no_args(self): + config = _bot_config() + db = Database(":memory:") + rm = RiskManager(config, db) + assert rm.compute_position_size() == 1005.0 + + +# === Database prediction methods === + +class TestDatabasePredictions: + def test_insert_and_get_latest(self): + db = Database(":memory:") + rows = [ + { + "source": "polymarket", "market_id": "abc123", "market_slug": "iran", + "category": "war_risk", "timestamp": 1000, "probability": 0.4, "volume_24h": 50000, + }, + { + "source": "polymarket", "market_id": "abc123", "market_slug": "iran", + "category": "war_risk", "timestamp": 2000, "probability": 0.6, "volume_24h": 55000, + }, + ] + db.insert_prediction_snapshots(rows) + latest = db.get_latest_predictions() + assert len(latest) == 1 + assert latest[0]["probability"] == 0.6 + assert latest[0]["timestamp"] == 2000 + + def 
test_insert_ignores_duplicates(self): + db = Database(":memory:") + row = { + "source": "kalshi", "market_id": "FED-1", "market_slug": "fed_rate", + "category": "rate_change", "timestamp": 1000, "probability": 0.5, "volume_24h": 100, + } + assert db.insert_prediction_snapshots([row]) == 1 + assert db.insert_prediction_snapshots([row]) == 0 + + def test_get_prediction_history(self): + db = Database(":memory:") + rows = [ + { + "source": "polymarket", "market_id": "abc", "market_slug": "iran", + "category": "war_risk", "timestamp": t, + "probability": 0.3 + t * 0.001, "volume_24h": 100, + } + for t in range(10) + ] + db.insert_prediction_snapshots(rows) + history = db.get_prediction_history("iran", start_time=5, limit=100) + assert len(history) == 5 + assert history[0]["timestamp"] == 5 + + def test_end_to_end_regime_transition(self): + """Insert snapshots → compute regime → verify signal changes.""" + db = Database(":memory:") + + # Insert war risk snapshots at 0.6 → should trigger HIGH_RISK + pred_cfg = _pred_config( + markets=[ + PredictionMarketDef( + slug="iran", source="polymarket", + market_id="abc", category="war_risk", + weight=1.0, + ), + ], + ) + db.insert_prediction_snapshots([{ + "source": "polymarket", "market_id": "abc", + "market_slug": "iran", "category": "war_risk", + "timestamp": 1000, "probability": 0.6, + "volume_24h": 50000, + }]) + + # Compute regime from DB (mirrors main._compute_prediction_state) + snapshots = db.get_latest_predictions() + war_snaps = [ + s for s in snapshots if s["category"] == "war_risk" + ] + w_risk = war_risk_score( + war_snaps, + {m.slug: m.weight for m in pred_cfg.markets}, + ) + regime = compute_regime(w_risk, 0.0, pred_cfg) + assert regime == PredictionRegime.HIGH_RISK + + # Build candles with z=2.1: passes NORMAL (2.0) but not + # HIGH_RISK (2.5) + engine = SignalEngine(_signal_config()) + df = _make_candles([100.0] * 30) + df = engine.compute_indicators(df) + df.loc[df.index[-1], "zscore"] = 2.1 + 
df.loc[df.index[-1], "rsi"] = 75 + df.loc[df.index[-1], "bb_upper"] = 99.0 + df.loc[df.index[-1], "adx"] = 15 + + normal = engine.evaluate( + df, prediction_regime=PredictionRegime.NORMAL, + ) + assert normal.signal == Signal.SHORT + + high_risk = engine.evaluate(df, prediction_regime=regime) + assert high_risk.signal == Signal.NONE + assert high_risk.prediction_regime == PredictionRegime.HIGH_RISK diff --git a/tests/test_risk.py b/tests/test_risk.py new file mode 100644 index 0000000..5f93b6d --- /dev/null +++ b/tests/test_risk.py @@ -0,0 +1,117 @@ +"""Tests for the risk manager.""" + +import time + +from perp_bot.config import ( + BotConfig, + DataConfig, + ExecutionConfig, + RiskConfig, + SignalConfig, + TradingConfig, +) +from perp_bot.data.db import Database +from perp_bot.risk.manager import RiskManager + + +def _test_config() -> BotConfig: + return BotConfig( + trading=TradingConfig( + symbols=["ETH"], leverage=3, + capital_usd=670.0, margin_usage_limit=0.5, + ), + signals=SignalConfig( + zscore_lookback=20, zscore_entry_threshold=2.0, + zscore_exit_threshold=0.3, zscore_stop_threshold=3.0, + bollinger_period=20, bollinger_std=2.0, + rsi_period=14, rsi_overbought=70, rsi_oversold=30, + adx_period=14, adx_threshold=25, + ), + risk=RiskConfig( + max_loss_per_trade_pct=0.03, daily_loss_limit_pct=0.08, + max_positions=1, cooldown_seconds=1800, + position_timeout_hours=24, + ), + data=DataConfig( + timeframes=["15m"], primary_timeframe="15m", + history_days=90, db_path=":memory:", + ), + execution=ExecutionConfig( + order_type="limit", taker_fallback_seconds=30, + use_server_side_stop=True, + ), + mode="paper", + ) + + +class TestRiskManager: + def test_entry_allowed_when_clean(self): + config = _test_config() + db = Database(":memory:") + rm = RiskManager(config, db) + check = rm.check_entry() + assert check.allowed + + def test_entry_blocked_max_positions(self): + config = _test_config() + db = Database(":memory:") + rm = RiskManager(config, db) + 
db.insert_trade({ + "symbol": "ETH", "side": "long", + "entry_time": int(time.time() * 1000), + "entry_price": 3000.0, "size_usd": 1000.0, "is_paper": 1, + }) + check = rm.check_entry() + assert not check.allowed + assert "max_positions" in check.reason + + def test_position_size(self): + config = _test_config() + db = Database(":memory:") + rm = RiskManager(config, db) + size = rm.compute_position_size() + # 670 * 0.5 * 3 = 1005 + assert size == 1005.0 + + def test_stop_loss_triggers(self): + config = _test_config() + db = Database(":memory:") + rm = RiskManager(config, db) + # Long position: entry 3000, current 2950 → loss = 50/3000 * 1005 = $16.75 + assert not rm.check_stop_loss(3000.0, 2950.0, "long") + # Entry 3000, current 2900 → loss = 100/3000 * 1005 = $33.50 > $20.10 (3% of 670) + assert rm.check_stop_loss(3000.0, 2900.0, "long") + + def test_cooldown_blocks_entry(self): + config = _test_config() + db = Database(":memory:") + rm = RiskManager(config, db) + rm.record_stop_loss() + check = rm.check_entry() + assert not check.allowed + assert "cooldown" in check.reason + + def test_cooldown_persists_across_restart(self): + config = _test_config() + db = Database(":memory:") + rm1 = RiskManager(config, db) + rm1.record_stop_loss() + # Verify it was persisted to DB + val = db.get_state("last_stop_loss_time_ms") + assert val is not None + + # Create a new RiskManager (simulates restart) + rm2 = RiskManager(config, db) + check = rm2.check_entry() + assert not check.allowed + assert "cooldown" in check.reason + + def test_expired_cooldown_not_restored(self): + config = _test_config() + db = Database(":memory:") + # Set a cooldown time that's already expired (2 hours ago) + old_time = int(time.time() * 1000) - 7200_000 + db.set_state("last_stop_loss_time_ms", str(old_time)) + rm = RiskManager(config, db) + check = rm.check_entry() + assert check.allowed diff --git a/tests/test_signals.py b/tests/test_signals.py new file mode 100644 index 0000000..446b387 --- 
/dev/null +++ b/tests/test_signals.py @@ -0,0 +1,82 @@ +"""Tests for the signal engine.""" + +import numpy as np +import pandas as pd + +from perp_bot.config import SignalConfig +from perp_bot.signals.engine import Signal, SignalEngine + + +def _default_config() -> SignalConfig: + return SignalConfig( + zscore_lookback=20, + zscore_entry_threshold=2.0, + zscore_exit_threshold=0.3, + zscore_stop_threshold=3.0, + bollinger_period=20, + bollinger_std=2.0, + rsi_period=14, + rsi_overbought=70, + rsi_oversold=30, + adx_period=14, + adx_threshold=25, + ) + + +def _make_candles(close_prices: list[float], n_warmup: int = 50) -> pd.DataFrame: + """Build a DataFrame with OHLCV from close prices, prepending warmup data.""" + rng = np.random.default_rng(42) + warmup = [100 + rng.normal(0, 1) for _ in range(n_warmup)] + all_close = warmup + close_prices + return pd.DataFrame({ + "open": all_close, + "high": [c + 0.5 for c in all_close], + "low": [c - 0.5 for c in all_close], + "close": all_close, + "volume": [1000.0] * len(all_close), + }) + + +class TestSignalEngine: + def test_no_signal_on_normal_data(self): + engine = SignalEngine(_default_config()) + df = _make_candles([100.0] * 30) + df = engine.compute_indicators(df) + result = engine.evaluate(df, position_side=None) + assert result.signal == Signal.NONE + + def test_close_on_mean_reversion(self): + """If Z-score is within exit threshold, position should close.""" + engine = SignalEngine(_default_config()) + # Prices that revert to mean + df = _make_candles([100.0] * 30) + df = engine.compute_indicators(df) + # Force zscore to be within exit threshold + df.loc[df.index[-1], "zscore"] = 0.1 + result = engine.evaluate(df, position_side="long") + assert result.signal == Signal.CLOSE + assert "mean_reversion" in result.reason + + def test_stop_loss_signal(self): + """Z-score exceeding stop threshold should trigger close.""" + engine = SignalEngine(_default_config()) + df = _make_candles([100.0] * 30) + df = 
engine.compute_indicators(df) + # Force extreme zscore against long position + df.loc[df.index[-1], "zscore"] = -3.5 + result = engine.evaluate(df, position_side="long") + assert result.signal == Signal.CLOSE + assert "stop" in result.reason + + def test_no_entry_during_trend(self): + """ADX above threshold should prevent entries.""" + engine = SignalEngine(_default_config()) + df = _make_candles([100.0] * 30) + df = engine.compute_indicators(df) + # Set up conditions that would trigger short entry, but ADX is high + df.loc[df.index[-1], "zscore"] = 2.5 + df.loc[df.index[-1], "rsi"] = 75 + df.loc[df.index[-1], "bb_upper"] = 99.0 # price above upper band + df.loc[df.index[-1], "adx"] = 35 # trending! + result = engine.evaluate(df, position_side=None) + assert result.signal == Signal.NONE diff --git a/tests/test_tui.py b/tests/test_tui.py new file mode 100644 index 0000000..9288782 --- /dev/null +++ b/tests/test_tui.py @@ -0,0 +1,63 @@ +"""Tests for TUI widgets — rendering and state updates.""" + +from __future__ import annotations + +from perp_bot.tui.widgets.header import HeaderWidget, _fmt_uptime +from perp_bot.tui.widgets.signals import _bar + + +class TestHeaderFormatting: + def test_fmt_uptime_minutes(self): + assert _fmt_uptime(125) == "2m 05s" + + def test_fmt_uptime_hours(self): + assert _fmt_uptime(3661) == "1h 01m" + + def test_fmt_uptime_zero(self): + assert _fmt_uptime(0) == "0m 00s" + + +class TestSignalBar: + def test_bar_full(self): + result = _bar(10.0, 0.0, 10.0, width=5) + assert result == "[=====]" + + def test_bar_empty(self): + result = _bar(0.0, 0.0, 10.0, width=5) + assert result == "[-----]" + + def test_bar_half(self): + result = _bar(5.0, 0.0, 10.0, width=10) + # [ + 10 chars + ] = 12 total + assert len(result) == 12 + assert result.startswith("[=====") + assert result.endswith("]") + + def test_bar_clamped_above(self): + result = _bar(999.0, 0.0, 10.0, width=5) + assert result == "[=====]" + + def test_bar_clamped_below(self): + result = 
_bar(-5.0, 0.0, 10.0, width=5) + assert result == "[-----]" + + +class TestWidgetStateUpdates: + """Test that widgets handle None state gracefully.""" + + def test_header_offline_state(self): + w = HeaderWidget() + # Should not raise when called before mount + w.update_state(None) + + def test_header_with_state(self): + w = HeaderWidget() + state = { + "mode": "paper", + "paused": False, + "ws_healthy": True, + "prediction_regime": "NORMAL", + "mid_prices": {"ETH": 3245.67}, + "uptime_seconds": 3600, + } + w.update_state(state) diff --git a/tests/test_ws_client.py b/tests/test_ws_client.py new file mode 100644 index 0000000..321a859 --- /dev/null +++ b/tests/test_ws_client.py @@ -0,0 +1,182 @@ +"""Tests for the WebSocket client — unit tests using mocked SDK.""" + +from __future__ import annotations + +from unittest.mock import patch + +from perp_bot.data.ws_client import WsClient + + +class TestWsClientMidPrices: + def test_get_mid_price_returns_none_before_subscribe(self): + with patch("perp_bot.data.ws_client.Info"): + ws = WsClient() + assert ws.get_mid_price("ETH") is None + + def test_mid_price_cache_updated_by_callback(self): + with patch("perp_bot.data.ws_client.Info") as MockInfo: + mock_info = MockInfo.return_value + mock_info.subscribe.return_value = 1 + + ws = WsClient() + ws.subscribe_mid_prices(["ETH", "BTC"]) + + # Extract the callback that was passed to subscribe + call_args = mock_info.subscribe.call_args + callback = call_args[0][1] + + # Simulate a message from the WebSocket + callback({"data": {"mids": {"ETH": "2500.5", "BTC": "95000.0"}}}) + + assert ws.get_mid_price("ETH") == 2500.5 + assert ws.get_mid_price("BTC") == 95000.0 + assert ws.get_mid_price("SOL") is None # not in data + + def test_mid_price_filters_to_watched_symbols(self): + with patch("perp_bot.data.ws_client.Info") as MockInfo: + mock_info = MockInfo.return_value + mock_info.subscribe.return_value = 1 + + ws = WsClient() + ws.subscribe_mid_prices(["ETH"]) + + callback = 
mock_info.subscribe.call_args[0][1] + callback({ + "data": {"mids": {"ETH": "2500.0", "BTC": "95000.0"}}, + }) + + assert ws.get_mid_price("ETH") == 2500.0 + # BTC not watched — should not be cached + assert ws.get_mid_price("BTC") is None + + +class TestWsClientCandles: + def test_candle_callback_receives_normalised_dict(self): + with patch("perp_bot.data.ws_client.Info") as MockInfo: + mock_info = MockInfo.return_value + mock_info.subscribe.return_value = 1 + + ws = WsClient() + received = [] + ws.subscribe_candles("ETH", "15m", received.append) + + callback = mock_info.subscribe.call_args[0][1] + callback({ + "data": { + "s": "ETH", "i": "15m", "t": 1000000, + "o": "2500", "h": "2510", "l": "2490", + "c": "2505", "v": "1000", "n": 50, + }, + }) + + assert len(received) == 1 + c = received[0] + assert c["symbol"] == "ETH" + assert c["timeframe"] == "15m" + assert c["close"] == 2505.0 + assert c["num_trades"] == 50 + + def test_malformed_candle_does_not_crash(self): + with patch("perp_bot.data.ws_client.Info") as MockInfo: + mock_info = MockInfo.return_value + mock_info.subscribe.return_value = 1 + + ws = WsClient() + received = [] + ws.subscribe_candles("ETH", "15m", received.append) + + callback = mock_info.subscribe.call_args[0][1] + # Missing keys — should log warning, not raise + callback({"data": {"s": "ETH"}}) + assert len(received) == 0 + + +class TestWsClientBbo: + def test_bbo_callback_receives_bid_ask(self): + with patch("perp_bot.data.ws_client.Info") as MockInfo: + mock_info = MockInfo.return_value + mock_info.subscribe.return_value = 1 + + ws = WsClient() + received = [] + ws.subscribe_bbo("ETH", lambda b, a: received.append((b, a))) + + callback = mock_info.subscribe.call_args[0][1] + callback({ + "data": { + "coin": "ETH", + "time": 1000000, + "bbo": [{"px": "2500.0"}, {"px": "2500.5"}], + }, + }) + + assert len(received) == 1 + assert received[0] == (2500.0, 2500.5) + + +class TestWsClientHealthCheck: + def 
test_healthy_when_no_subscriptions(self): + with patch("perp_bot.data.ws_client.Info"): + ws = WsClient() + assert ws.is_healthy() is True + + def test_healthy_after_recent_update(self): + with patch("perp_bot.data.ws_client.Info") as MockInfo: + mock_info = MockInfo.return_value + mock_info.subscribe.return_value = 1 + + ws = WsClient() + ws.subscribe_mid_prices(["ETH"]) + + callback = mock_info.subscribe.call_args[0][1] + callback({"data": {"mids": {"ETH": "2500.0"}}}) + + assert ws.is_healthy() is True + + def test_unhealthy_when_stale(self): + with patch("perp_bot.data.ws_client.Info") as MockInfo: + mock_info = MockInfo.return_value + mock_info.subscribe.return_value = 1 + + ws = WsClient() + ws._stale_threshold_seconds = 0.01 # Very short for testing + ws.subscribe_mid_prices(["ETH"]) + + callback = mock_info.subscribe.call_args[0][1] + callback({"data": {"mids": {"ETH": "2500.0"}}}) + + import time + time.sleep(0.02) + assert ws.is_healthy() is False + + def test_reconnect_recreates_info(self): + with patch("perp_bot.data.ws_client.Info") as MockInfo: + mock_info = MockInfo.return_value + mock_info.subscribe.return_value = 1 + + ws = WsClient() + ws.subscribe_mid_prices(["ETH"]) + assert len(ws._sub_ids) == 1 + + ws.reconnect() + + # Info should have been recreated (called twice total) + assert MockInfo.call_count == 2 + # Should have re-subscribed mid prices + assert len(ws._sub_ids) == 1 + + +class TestWsClientLifecycle: + def test_close_unsubscribes_all(self): + with patch("perp_bot.data.ws_client.Info") as MockInfo: + mock_info = MockInfo.return_value + mock_info.subscribe.return_value = 1 + + ws = WsClient() + ws.subscribe_mid_prices(["ETH"]) + ws.subscribe_candles("ETH", "15m", lambda x: None) + assert len(ws._sub_ids) == 2 + + ws.close() + assert mock_info.unsubscribe.call_count == 2 + assert len(ws._sub_ids) == 0 diff --git a/uv.lock b/uv.lock new file mode 100644 index 0000000..0adfbc9 --- /dev/null +++ b/uv.lock @@ -0,0 +1,1293 @@ +version = 1 
+revision = 3 +requires-python = ">=3.12" +resolution-markers = [ + "python_full_version >= '3.14' and sys_platform == 'win32'", + "python_full_version >= '3.14' and sys_platform == 'emscripten'", + "python_full_version >= '3.14' and sys_platform != 'emscripten' and sys_platform != 'win32'", + "python_full_version < '3.14' and sys_platform == 'win32'", + "python_full_version < '3.14' and sys_platform == 'emscripten'", + "python_full_version < '3.14' and sys_platform != 'emscripten' and sys_platform != 'win32'", +] + +[[package]] +name = "annotated-types" +version = "0.7.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/ee/67/531ea369ba64dcff5ec9c3402f9f51bf748cec26dde048a2f973a4eea7f5/annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89", size = 16081, upload-time = "2024-05-20T21:33:25.928Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/78/b6/6307fbef88d9b5ee7421e68d78a9f162e0da4900bc5f5793f6d3d0e34fb8/annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53", size = 13643, upload-time = "2024-05-20T21:33:24.1Z" }, +] + +[[package]] +name = "bitarray" +version = "3.8.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/95/06/92fdc84448d324ab8434b78e65caf4fb4c6c90b4f8ad9bdd4c8021bfaf1e/bitarray-3.8.0.tar.gz", hash = "sha256:3eae38daffd77c9621ae80c16932eea3fb3a4af141fb7cc724d4ad93eff9210d", size = 151991, upload-time = "2025-11-02T21:41:15.117Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/82/a0/0c41d893eda756315491adfdbf9bc928aee3d377a7f97a8834d453aa5de1/bitarray-3.8.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:f2fcbe9b3a5996b417e030aa33a562e7e20dfc86271e53d7e841fc5df16268b8", size = 148575, upload-time = "2025-11-02T21:39:25.718Z" }, + { url = 
"https://files.pythonhosted.org/packages/0e/30/12ab2f4a4429bd844b419c37877caba93d676d18be71354fbbeb21d9f4cc/bitarray-3.8.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:cd761d158f67e288fd0ebe00c3b158095ce80a4bc7c32b60c7121224003ba70d", size = 145454, upload-time = "2025-11-02T21:39:26.695Z" }, + { url = "https://files.pythonhosted.org/packages/26/58/314b3e3f219533464e120f0c51ac5123e7b1c1b91f725a4073fb70c5a858/bitarray-3.8.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c394a3f055b49f92626f83c1a0b6d6cd2c628f1ccd72481c3e3c6aa4695f3b20", size = 332949, upload-time = "2025-11-02T21:39:27.801Z" }, + { url = "https://files.pythonhosted.org/packages/ea/ce/ca8c706bd8341c7a22dd92d2a528af71f7e5f4726085d93f81fd768cb03b/bitarray-3.8.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:969fd67de8c42affdb47b38b80f1eaa79ac0ef17d65407cdd931db1675315af1", size = 360599, upload-time = "2025-11-02T21:39:28.964Z" }, + { url = "https://files.pythonhosted.org/packages/ef/dc/aa181df85f933052d962804906b282acb433cb9318b08ec2aceb4ee34faf/bitarray-3.8.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:99d25aff3745c54e61ab340b98400c52ebec04290a62078155e0d7eb30380220", size = 371972, upload-time = "2025-11-02T21:39:30.228Z" }, + { url = "https://files.pythonhosted.org/packages/ff/d9/b805bfa158c7bcf4df0ac19b1be581b47e1ddb792c11023aed80a7058e78/bitarray-3.8.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e645b4c365d6f1f9e0799380ad6395268f3c3b898244a650aaeb8d9d27b74c35", size = 340303, upload-time = "2025-11-02T21:39:31.342Z" }, + { url = "https://files.pythonhosted.org/packages/1f/42/5308cc97ea929e30727292617a3a88293470166851e13c9e3f16f395da55/bitarray-3.8.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:2fa23fdb3beab313950bbb49674e8a161e61449332d3997089fe3944953f1b77", size = 
330494, upload-time = "2025-11-02T21:39:32.769Z" }, + { url = "https://files.pythonhosted.org/packages/4c/89/64f1596cb80433323efdbc8dcd0d6e57c40dfbe6ea3341623f34ec397edd/bitarray-3.8.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:165052a0e61c880f7093808a0c524ce1b3555bfa114c0dfb5c809cd07918a60d", size = 358123, upload-time = "2025-11-02T21:39:34.331Z" }, + { url = "https://files.pythonhosted.org/packages/27/fd/f3d49c5443b57087f888b5e118c8dd78bb7c8e8cfeeed250f8e92128a05f/bitarray-3.8.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:337c8cd46a4c6568d367ed676cbf2d7de16f890bb31dbb54c44c1d6bb6d4a1de", size = 356046, upload-time = "2025-11-02T21:39:35.449Z" }, + { url = "https://files.pythonhosted.org/packages/aa/db/1fd0b402bd2b47142e958b6930dbb9445235d03fa703c9a24caa6e576ae2/bitarray-3.8.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:21ca6a47bf20db9e7ad74ca04b3d479e4d76109b68333eb23535553d2705339e", size = 336872, upload-time = "2025-11-02T21:39:36.891Z" }, + { url = "https://files.pythonhosted.org/packages/58/73/680b47718f1313b4538af479c4732eaca0aeda34d93fc5b869f87932d57d/bitarray-3.8.0-cp312-cp312-win32.whl", hash = "sha256:178c5a4c7fdfb5cd79e372ae7f675390e670f3732e5bc68d327e01a5b3ff8d55", size = 143025, upload-time = "2025-11-02T21:39:38.303Z" }, + { url = "https://files.pythonhosted.org/packages/f8/11/7792587c19c79a8283e8838f44709fa4338a8f7d2a3091dfd81c07ae89c7/bitarray-3.8.0-cp312-cp312-win_amd64.whl", hash = "sha256:75a3b6e9c695a6570ea488db75b84bb592ff70a944957efa1c655867c575018b", size = 149969, upload-time = "2025-11-02T21:39:39.715Z" }, + { url = "https://files.pythonhosted.org/packages/9a/00/9df64b5d8a84e8e9ec392f6f9ce93f50626a5b301cb6c6b3fe3406454d66/bitarray-3.8.0-cp312-cp312-win_arm64.whl", hash = "sha256:5591daf81313096909d973fb2612fccd87528fdfdd39f6478bdce54543178954", size = 146907, upload-time = "2025-11-02T21:39:40.815Z" }, + { url = 
"https://files.pythonhosted.org/packages/3e/35/480364d4baf1e34c79076750914664373f561c58abb5c31c35b3fae613ff/bitarray-3.8.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:18214bac86341f1cc413772e66447d6cca10981e2880b70ecaf4e826c04f95e9", size = 148582, upload-time = "2025-11-02T21:39:42.268Z" }, + { url = "https://files.pythonhosted.org/packages/5e/a8/718b95524c803937f4edbaaf6480f39c80f6ed189d61357b345e8361ffb6/bitarray-3.8.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:01c5f0dc080b0ebb432f7a68ee1e88a76bd34f6d89c9568fcec65fb16ed71f0e", size = 145433, upload-time = "2025-11-02T21:39:43.552Z" }, + { url = "https://files.pythonhosted.org/packages/03/66/4a10f30dc9e2e01e3b4ecd44a511219f98e63c86b0e0f704c90fac24059b/bitarray-3.8.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:86685fa04067f7175f9718489ae755f6acde03593a1a9ca89305554af40e14fd", size = 332986, upload-time = "2025-11-02T21:39:44.656Z" }, + { url = "https://files.pythonhosted.org/packages/53/25/4c08774d847f80a1166e4c704b4e0f1c417c0afe6306eae0bc5e70d35faa/bitarray-3.8.0-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:56896ceeffe25946c4010320629e2d858ca763cd8ded273c81672a5edbcb1e0a", size = 360634, upload-time = "2025-11-02T21:39:45.798Z" }, + { url = "https://files.pythonhosted.org/packages/a5/8f/bf8ad26169ebd0b2746d5c7564db734453ca467f8aab87e9d43b0a794383/bitarray-3.8.0-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:9858dcbc23ba7eaadcd319786b982278a1a2b2020720b19db43e309579ff76fb", size = 371992, upload-time = "2025-11-02T21:39:46.968Z" }, + { url = "https://files.pythonhosted.org/packages/a9/16/ce166754e7c9d10650e02914552fa637cf3b2591f7ed16632bbf6b783312/bitarray-3.8.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:aa7dec53c25f1949513457ef8b0ea1fb40e76c672cc4d2daa8ad3c8d6b73491a", size = 340315, 
upload-time = "2025-11-02T21:39:48.182Z" }, + { url = "https://files.pythonhosted.org/packages/de/2a/fbba3a106ddd260e84b9a624f730257c32ba51a8a029565248dfedfdf6f2/bitarray-3.8.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:15a2eff91f54d2b1f573cca8ca6fb58763ce8fea80e7899ab028f3987ef71cd5", size = 330473, upload-time = "2025-11-02T21:39:49.705Z" }, + { url = "https://files.pythonhosted.org/packages/68/97/56cf3c70196e7307ad32318a9d6ed969dbdc6a4534bbe429112fa7dfe42e/bitarray-3.8.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:b1572ee0eb1967e71787af636bb7d1eb9c6735d5337762c450650e7f51844594", size = 358129, upload-time = "2025-11-02T21:39:51.189Z" }, + { url = "https://files.pythonhosted.org/packages/fd/be/afd391a5c0896d3339613321b2f94af853f29afc8bd3fbc327431244c642/bitarray-3.8.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:5bfac7f236ba1a4d402644bdce47fb9db02a7cf3214a1f637d3a88390f9e5428", size = 356005, upload-time = "2025-11-02T21:39:52.355Z" }, + { url = "https://files.pythonhosted.org/packages/ae/08/a8e1a371babba29bad3378bb3a2cdca2b012170711e7fe1f22031a6b7b95/bitarray-3.8.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:f0a55cf02d2cdd739b40ce10c09bbdd520e141217696add7a48b56e67bdfdfe6", size = 336862, upload-time = "2025-11-02T21:39:54.345Z" }, + { url = "https://files.pythonhosted.org/packages/ee/8a/6dc1d0fdc06991c8dc3b1fcfe1ae49fbaced42064cd1b5f24278e73fe05f/bitarray-3.8.0-cp313-cp313-win32.whl", hash = "sha256:a2ba92f59e30ce915e9e79af37649432e3a212ddddf416d4d686b1b4825bcdb2", size = 143018, upload-time = "2025-11-02T21:39:56.361Z" }, + { url = "https://files.pythonhosted.org/packages/2e/72/76e13f5cd23b8b9071747909663ce3b02da24a5e7e22c35146338625db35/bitarray-3.8.0-cp313-cp313-win_amd64.whl", hash = "sha256:1c8f2a5d8006db5a555e06f9437e76bf52537d3dfd130cb8ae2b30866aca32c9", size = 149977, upload-time = "2025-11-02T21:39:57.718Z" }, + { url = 
"https://files.pythonhosted.org/packages/01/37/60f336c32336cc3ec03b0c61076f16ea2f05d5371c8a56e802161d218b77/bitarray-3.8.0-cp313-cp313-win_arm64.whl", hash = "sha256:50ddbe3a7b4b6ab96812f5a4d570f401a2cdb95642fd04c062f98939610bbeee", size = 146930, upload-time = "2025-11-02T21:39:59.308Z" }, + { url = "https://files.pythonhosted.org/packages/1b/b0/411327a6c7f6b2bead64bb06fe60b92e0344957ec1ab0645d5ccc25fdafe/bitarray-3.8.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:8cbd4bfc933b33b85c43ef4c1f4d5e3e9d91975ea6368acf5fbac02bac06ea89", size = 148563, upload-time = "2025-11-02T21:40:01.006Z" }, + { url = "https://files.pythonhosted.org/packages/2a/bc/ff80d97c627d774f879da0ea93223adb1267feab7e07d5c17580ffe6d632/bitarray-3.8.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:9d35d8f8a1c9ed4e2b08187b513f8a3c71958600129db3aa26d85ea3abfd1310", size = 145422, upload-time = "2025-11-02T21:40:02.535Z" }, + { url = "https://files.pythonhosted.org/packages/66/e7/b4cb6c5689aacd0a32f3aa8a507155eaa33528c63de2f182b60843fbf700/bitarray-3.8.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:99f55e14e7c56f4fafe1343480c32b110ef03836c21ff7c48bae7add6818f77c", size = 332852, upload-time = "2025-11-02T21:40:03.645Z" }, + { url = "https://files.pythonhosted.org/packages/e7/91/fbd1b047e3e2f4b65590f289c8151df1d203d75b005f5aae4e072fe77d76/bitarray-3.8.0-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:dfbe2aa45b273f49e715c5345d94874cb65a28482bf231af408891c260601b8d", size = 360801, upload-time = "2025-11-02T21:40:04.827Z" }, + { url = "https://files.pythonhosted.org/packages/ef/4a/63064c593627bac8754fdafcb5343999c93ab2aeb27bcd9d270a010abea5/bitarray-3.8.0-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:64af877116edf051375b45f0bda648143176a017b13803ec7b3a3111dc05f4c5", size = 371408, upload-time = "2025-11-02T21:40:05.985Z" }, + { url = 
"https://files.pythonhosted.org/packages/46/97/ddc07723767bdafd170f2ff6e173c940fa874192783ee464aa3c1dedf07d/bitarray-3.8.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:cdfbb27f2c46bb5bbdcee147530cbc5ca8ab858d7693924e88e30ada21b2c5e2", size = 340033, upload-time = "2025-11-02T21:40:07.189Z" }, + { url = "https://files.pythonhosted.org/packages/ad/1e/e1ea9f1146fd4af032817069ff118918d73e5de519854ce3860e2ed560ff/bitarray-3.8.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:4d73d4948dcc5591d880db8933004e01f1dd2296df9de815354d53469beb26fe", size = 330774, upload-time = "2025-11-02T21:40:08.496Z" }, + { url = "https://files.pythonhosted.org/packages/cf/9f/8242296c124a48d1eab471fd0838aeb7ea9c6fd720302d99ab7855d3e6d3/bitarray-3.8.0-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:28a85b056c0eb7f5d864c0ceef07034117e8ebfca756f50648c71950a568ba11", size = 358337, upload-time = "2025-11-02T21:40:10.035Z" }, + { url = "https://files.pythonhosted.org/packages/b5/6b/9095d75264c67d479f298c80802422464ce18c3cdd893252eeccf4997611/bitarray-3.8.0-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:79ec4498a545733ecace48d780d22407411b07403a2e08b9a4d7596c0b97ebd7", size = 355639, upload-time = "2025-11-02T21:40:11.485Z" }, + { url = "https://files.pythonhosted.org/packages/a0/af/c93c0ae5ef824136e90ac7ddf6cceccb1232f34240b2f55a922f874da9b4/bitarray-3.8.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:33af25c4ff7723363cb8404dfc2eefeab4110b654f6c98d26aba8a08c745d860", size = 336999, upload-time = "2025-11-02T21:40:12.709Z" }, + { url = "https://files.pythonhosted.org/packages/81/0f/72c951f5997b2876355d5e671f78dd2362493254876675cf22dbd24389ae/bitarray-3.8.0-cp314-cp314-win32.whl", hash = "sha256:2c3bb96b6026643ce24677650889b09073f60b9860a71765f843c99f9ab38b25", size = 142169, upload-time = "2025-11-02T21:40:14.031Z" }, + { url = 
"https://files.pythonhosted.org/packages/8a/55/ef1b4de8107bf13823da8756c20e1fbc9452228b4e837f46f6d9ddba3eb3/bitarray-3.8.0-cp314-cp314-win_amd64.whl", hash = "sha256:847c7f61964225fc489fe1d49eda7e0e0d253e98862c012cecf845f9ad45cdf4", size = 148737, upload-time = "2025-11-02T21:40:15.436Z" }, + { url = "https://files.pythonhosted.org/packages/5f/26/bc0784136775024ac56cc67c0d6f9aa77a7770de7f82c3a7c9be11c217cd/bitarray-3.8.0-cp314-cp314-win_arm64.whl", hash = "sha256:a2cb35a6efaa0e3623d8272471371a12c7e07b51a33e5efce9b58f655d864b4e", size = 146083, upload-time = "2025-11-02T21:40:17.135Z" }, + { url = "https://files.pythonhosted.org/packages/6e/64/57984e64264bf43d93a1809e645972771566a2d0345f4896b041ce20b000/bitarray-3.8.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:15e8d0597cc6e8496de6f4dea2a6880c57e1251502a7072f5631108a1aa28521", size = 149455, upload-time = "2025-11-02T21:40:18.558Z" }, + { url = "https://files.pythonhosted.org/packages/81/c0/0d5f2eaef1867f462f764bdb07d1e116c33a1bf052ea21889aefe4282f5b/bitarray-3.8.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:8ffe660e963ae711cb9e2b8d8461c9b1ad6167823837fc17d59d5e539fb898fa", size = 146491, upload-time = "2025-11-02T21:40:19.665Z" }, + { url = "https://files.pythonhosted.org/packages/65/c6/bc1261f7a8862c0c59220a484464739e52235fd1e2afcb24d7f7d3fb5702/bitarray-3.8.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4779f356083c62e29b4198d290b7b17a39a69702d150678b7efff0fdddf494a8", size = 339721, upload-time = "2025-11-02T21:40:21.277Z" }, + { url = "https://files.pythonhosted.org/packages/81/d8/289ca55dd2939ea17b1108dc53bffc0fdc5160ba44f77502dfaae35d08c6/bitarray-3.8.0-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:025d133bf4ca8cf75f904eeb8ea946228d7c043231866143f31946a6f4dd0bf3", size = 367823, upload-time = "2025-11-02T21:40:22.463Z" }, + { url = 
"https://files.pythonhosted.org/packages/91/a2/61e7461ca9ac0fcb70f327a2e84b006996d2a840898e69037a39c87c6d06/bitarray-3.8.0-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:451f9958850ea98440d542278368c8d1e1ea821e2494b204570ba34a340759df", size = 377341, upload-time = "2025-11-02T21:40:23.789Z" }, + { url = "https://files.pythonhosted.org/packages/6c/87/4a0c9c8bdb13916d443e04d8f8542eef9190f31425da3c17c3478c40173f/bitarray-3.8.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6d79f659965290af60d6acc8e2716341865fe74609a7ede2a33c2f86ad893b8f", size = 344985, upload-time = "2025-11-02T21:40:25.261Z" }, + { url = "https://files.pythonhosted.org/packages/17/4c/ff9259b916efe53695b631772e5213699c738efc2471b5ffe273f4000994/bitarray-3.8.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:fbf05678c2ae0064fb1b8de7e9e8f0fc30621b73c8477786dd0fb3868044a8c8", size = 336796, upload-time = "2025-11-02T21:40:26.942Z" }, + { url = "https://files.pythonhosted.org/packages/0f/4b/51b2468bbddbade5e2f3b8d5db08282c5b309e8687b0f02f75a8b5ff559c/bitarray-3.8.0-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:c396358023b876cff547ce87f4e8ff8a2280598873a137e8cc69e115262260b8", size = 365085, upload-time = "2025-11-02T21:40:28.224Z" }, + { url = "https://files.pythonhosted.org/packages/bf/79/53473bfc2e052c6dbb628cdc1b156be621c77aaeb715918358b01574be55/bitarray-3.8.0-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:ed3493a369fe849cce98542d7405c88030b355e4d2e113887cb7ecc86c205773", size = 361012, upload-time = "2025-11-02T21:40:29.635Z" }, + { url = "https://files.pythonhosted.org/packages/c4/b1/242bf2e44bfc69e73fa2b954b425d761a8e632f78ea31008f1c3cfad0854/bitarray-3.8.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:c764fb167411d5afaef88138542a4bfa28bd5e5ded5e8e42df87cef965efd6e9", size = 340644, upload-time = "2025-11-02T21:40:31.089Z" }, + { url = 
"https://files.pythonhosted.org/packages/cf/01/12e5ecf30a5de28a32485f226cad4b8a546845f65f755ce0365057ab1e92/bitarray-3.8.0-cp314-cp314t-win32.whl", hash = "sha256:e12769d3adcc419e65860de946df8d2ed274932177ac1cdb05186e498aaa9149", size = 143630, upload-time = "2025-11-02T21:40:32.351Z" }, + { url = "https://files.pythonhosted.org/packages/b6/92/6b6ade587b08024a8a890b07724775d29da9cf7497be5c3cbe226185e463/bitarray-3.8.0-cp314-cp314t-win_amd64.whl", hash = "sha256:0ca70ccf789446a6dfde40b482ec21d28067172cd1f8efd50d5548159fccad9e", size = 150250, upload-time = "2025-11-02T21:40:33.596Z" }, + { url = "https://files.pythonhosted.org/packages/ed/40/be3858ffed004e47e48a2cefecdbf9b950d41098b780f9dc3aa609a88351/bitarray-3.8.0-cp314-cp314t-win_arm64.whl", hash = "sha256:2a3d1b05ffdd3e95687942ae7b13c63689f85d3f15c39b33329e3cb9ce6c015f", size = 147015, upload-time = "2025-11-02T21:40:35.064Z" }, +] + +[[package]] +name = "certifi" +version = "2026.2.25" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/af/2d/7bf41579a8986e348fa033a31cdd0e4121114f6bce2457e8876010b092dd/certifi-2026.2.25.tar.gz", hash = "sha256:e887ab5cee78ea814d3472169153c2d12cd43b14bd03329a39a9c6e2e80bfba7", size = 155029, upload-time = "2026-02-25T02:54:17.342Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/9a/3c/c17fb3ca2d9c3acff52e30b309f538586f9f5b9c9cf454f3845fc9af4881/certifi-2026.2.25-py3-none-any.whl", hash = "sha256:027692e4402ad994f1c42e52a4997a9763c646b73e4096e4d5d6db8af1d6f0fa", size = 153684, upload-time = "2026-02-25T02:54:15.766Z" }, +] + +[[package]] +name = "charset-normalizer" +version = "3.4.6" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/7b/60/e3bec1881450851b087e301bedc3daa9377a4d45f1c26aa90b0b235e38aa/charset_normalizer-3.4.6.tar.gz", hash = "sha256:1ae6b62897110aa7c79ea2f5dd38d1abca6db663687c0b1ad9aed6f6bae3d9d6", size = 143363, upload-time = 
"2026-03-15T18:53:25.478Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e5/62/c0815c992c9545347aeea7859b50dc9044d147e2e7278329c6e02ac9a616/charset_normalizer-3.4.6-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:2ef7fedc7a6ecbe99969cd09632516738a97eeb8bd7258bf8a0f23114c057dab", size = 295154, upload-time = "2026-03-15T18:50:50.88Z" }, + { url = "https://files.pythonhosted.org/packages/a8/37/bdca6613c2e3c58c7421891d80cc3efa1d32e882f7c4a7ee6039c3fc951a/charset_normalizer-3.4.6-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a4ea868bc28109052790eb2b52a9ab33f3aa7adc02f96673526ff47419490e21", size = 199191, upload-time = "2026-03-15T18:50:52.658Z" }, + { url = "https://files.pythonhosted.org/packages/6c/92/9934d1bbd69f7f398b38c5dae1cbf9cc672e7c34a4adf7b17c0a9c17d15d/charset_normalizer-3.4.6-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:836ab36280f21fc1a03c99cd05c6b7af70d2697e374c7af0b61ed271401a72a2", size = 218674, upload-time = "2026-03-15T18:50:54.102Z" }, + { url = "https://files.pythonhosted.org/packages/af/90/25f6ab406659286be929fd89ab0e78e38aa183fc374e03aa3c12d730af8a/charset_normalizer-3.4.6-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:f1ce721c8a7dfec21fcbdfe04e8f68174183cf4e8188e0645e92aa23985c57ff", size = 215259, upload-time = "2026-03-15T18:50:55.616Z" }, + { url = "https://files.pythonhosted.org/packages/4e/ef/79a463eb0fff7f96afa04c1d4c51f8fc85426f918db467854bfb6a569ce3/charset_normalizer-3.4.6-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0e28d62a8fc7a1fa411c43bd65e346f3bce9716dc51b897fbe930c5987b402d5", size = 207276, upload-time = "2026-03-15T18:50:57.054Z" }, + { url = 
"https://files.pythonhosted.org/packages/f7/72/d0426afec4b71dc159fa6b4e68f868cd5a3ecd918fec5813a15d292a7d10/charset_normalizer-3.4.6-cp312-cp312-manylinux_2_31_armv7l.whl", hash = "sha256:530d548084c4a9f7a16ed4a294d459b4f229db50df689bfe92027452452943a0", size = 195161, upload-time = "2026-03-15T18:50:58.686Z" }, + { url = "https://files.pythonhosted.org/packages/bf/18/c82b06a68bfcb6ce55e508225d210c7e6a4ea122bfc0748892f3dc4e8e11/charset_normalizer-3.4.6-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:30f445ae60aad5e1f8bdbb3108e39f6fbc09f4ea16c815c66578878325f8f15a", size = 203452, upload-time = "2026-03-15T18:51:00.196Z" }, + { url = "https://files.pythonhosted.org/packages/44/d6/0c25979b92f8adafdbb946160348d8d44aa60ce99afdc27df524379875cb/charset_normalizer-3.4.6-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:ac2393c73378fea4e52aa56285a3d64be50f1a12395afef9cce47772f60334c2", size = 202272, upload-time = "2026-03-15T18:51:01.703Z" }, + { url = "https://files.pythonhosted.org/packages/2e/3d/7fea3e8fe84136bebbac715dd1221cc25c173c57a699c030ab9b8900cbb7/charset_normalizer-3.4.6-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:90ca27cd8da8118b18a52d5f547859cc1f8354a00cd1e8e5120df3e30d6279e5", size = 195622, upload-time = "2026-03-15T18:51:03.526Z" }, + { url = "https://files.pythonhosted.org/packages/57/8a/d6f7fd5cb96c58ef2f681424fbca01264461336d2a7fc875e4446b1f1346/charset_normalizer-3.4.6-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:8e5a94886bedca0f9b78fecd6afb6629142fd2605aa70a125d49f4edc6037ee6", size = 220056, upload-time = "2026-03-15T18:51:05.269Z" }, + { url = "https://files.pythonhosted.org/packages/16/50/478cdda782c8c9c3fb5da3cc72dd7f331f031e7f1363a893cdd6ca0f8de0/charset_normalizer-3.4.6-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:695f5c2823691a25f17bc5d5ffe79fa90972cc34b002ac6c843bb8a1720e950d", size = 203751, upload-time = "2026-03-15T18:51:06.858Z" }, + { url = 
"https://files.pythonhosted.org/packages/75/fc/cc2fcac943939c8e4d8791abfa139f685e5150cae9f94b60f12520feaa9b/charset_normalizer-3.4.6-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:231d4da14bcd9301310faf492051bee27df11f2bc7549bc0bb41fef11b82daa2", size = 216563, upload-time = "2026-03-15T18:51:08.564Z" }, + { url = "https://files.pythonhosted.org/packages/a8/b7/a4add1d9a5f68f3d037261aecca83abdb0ab15960a3591d340e829b37298/charset_normalizer-3.4.6-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:a056d1ad2633548ca18ffa2f85c202cfb48b68615129143915b8dc72a806a923", size = 209265, upload-time = "2026-03-15T18:51:10.312Z" }, + { url = "https://files.pythonhosted.org/packages/6c/18/c094561b5d64a24277707698e54b7f67bd17a4f857bbfbb1072bba07c8bf/charset_normalizer-3.4.6-cp312-cp312-win32.whl", hash = "sha256:c2274ca724536f173122f36c98ce188fd24ce3dad886ec2b7af859518ce008a4", size = 144229, upload-time = "2026-03-15T18:51:11.694Z" }, + { url = "https://files.pythonhosted.org/packages/ab/20/0567efb3a8fd481b8f34f739ebddc098ed062a59fed41a8d193a61939e8f/charset_normalizer-3.4.6-cp312-cp312-win_amd64.whl", hash = "sha256:c8ae56368f8cc97c7e40a7ee18e1cedaf8e780cd8bc5ed5ac8b81f238614facb", size = 154277, upload-time = "2026-03-15T18:51:13.004Z" }, + { url = "https://files.pythonhosted.org/packages/15/57/28d79b44b51933119e21f65479d0864a8d5893e494cf5daab15df0247c17/charset_normalizer-3.4.6-cp312-cp312-win_arm64.whl", hash = "sha256:899d28f422116b08be5118ef350c292b36fc15ec2daeb9ea987c89281c7bb5c4", size = 142817, upload-time = "2026-03-15T18:51:14.408Z" }, + { url = "https://files.pythonhosted.org/packages/1e/1d/4fdabeef4e231153b6ed7567602f3b68265ec4e5b76d6024cf647d43d981/charset_normalizer-3.4.6-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:11afb56037cbc4b1555a34dd69151e8e069bee82e613a73bef6e714ce733585f", size = 294823, upload-time = "2026-03-15T18:51:15.755Z" }, + { url = 
"https://files.pythonhosted.org/packages/47/7b/20e809b89c69d37be748d98e84dce6820bf663cf19cf6b942c951a3e8f41/charset_normalizer-3.4.6-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:423fb7e748a08f854a08a222b983f4df1912b1daedce51a72bd24fe8f26a1843", size = 198527, upload-time = "2026-03-15T18:51:17.177Z" }, + { url = "https://files.pythonhosted.org/packages/37/a6/4f8d27527d59c039dce6f7622593cdcd3d70a8504d87d09eb11e9fdc6062/charset_normalizer-3.4.6-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:d73beaac5e90173ac3deb9928a74763a6d230f494e4bfb422c217a0ad8e629bf", size = 218388, upload-time = "2026-03-15T18:51:18.934Z" }, + { url = "https://files.pythonhosted.org/packages/f6/9b/4770ccb3e491a9bacf1c46cc8b812214fe367c86a96353ccc6daf87b01ec/charset_normalizer-3.4.6-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:d60377dce4511655582e300dc1e5a5f24ba0cb229005a1d5c8d0cb72bb758ab8", size = 214563, upload-time = "2026-03-15T18:51:20.374Z" }, + { url = "https://files.pythonhosted.org/packages/2b/58/a199d245894b12db0b957d627516c78e055adc3a0d978bc7f65ddaf7c399/charset_normalizer-3.4.6-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:530e8cebeea0d76bdcf93357aa5e41336f48c3dc709ac52da2bb167c5b8271d9", size = 206587, upload-time = "2026-03-15T18:51:21.807Z" }, + { url = "https://files.pythonhosted.org/packages/7e/70/3def227f1ec56f5c69dfc8392b8bd63b11a18ca8178d9211d7cc5e5e4f27/charset_normalizer-3.4.6-cp313-cp313-manylinux_2_31_armv7l.whl", hash = "sha256:a26611d9987b230566f24a0a125f17fe0de6a6aff9f25c9f564aaa2721a5fb88", size = 194724, upload-time = "2026-03-15T18:51:23.508Z" }, + { url = "https://files.pythonhosted.org/packages/58/ab/9318352e220c05efd31c2779a23b50969dc94b985a2efa643ed9077bfca5/charset_normalizer-3.4.6-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = 
"sha256:34315ff4fc374b285ad7f4a0bf7dcbfe769e1b104230d40f49f700d4ab6bbd84", size = 202956, upload-time = "2026-03-15T18:51:25.239Z" }, + { url = "https://files.pythonhosted.org/packages/75/13/f3550a3ac25b70f87ac98c40d3199a8503676c2f1620efbf8d42095cfc40/charset_normalizer-3.4.6-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:5f8ddd609f9e1af8c7bd6e2aca279c931aefecd148a14402d4e368f3171769fd", size = 201923, upload-time = "2026-03-15T18:51:26.682Z" }, + { url = "https://files.pythonhosted.org/packages/1b/db/c5c643b912740b45e8eec21de1bbab8e7fc085944d37e1e709d3dcd9d72f/charset_normalizer-3.4.6-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:80d0a5615143c0b3225e5e3ef22c8d5d51f3f72ce0ea6fb84c943546c7b25b6c", size = 195366, upload-time = "2026-03-15T18:51:28.129Z" }, + { url = "https://files.pythonhosted.org/packages/5a/67/3b1c62744f9b2448443e0eb160d8b001c849ec3fef591e012eda6484787c/charset_normalizer-3.4.6-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:92734d4d8d187a354a556626c221cd1a892a4e0802ccb2af432a1d85ec012194", size = 219752, upload-time = "2026-03-15T18:51:29.556Z" }, + { url = "https://files.pythonhosted.org/packages/f6/98/32ffbaf7f0366ffb0445930b87d103f6b406bc2c271563644bde8a2b1093/charset_normalizer-3.4.6-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:613f19aa6e082cf96e17e3ffd89383343d0d589abda756b7764cf78361fd41dc", size = 203296, upload-time = "2026-03-15T18:51:30.921Z" }, + { url = "https://files.pythonhosted.org/packages/41/12/5d308c1bbe60cabb0c5ef511574a647067e2a1f631bc8634fcafaccd8293/charset_normalizer-3.4.6-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:2b1a63e8224e401cafe7739f77efd3f9e7f5f2026bda4aead8e59afab537784f", size = 215956, upload-time = "2026-03-15T18:51:32.399Z" }, + { url = "https://files.pythonhosted.org/packages/53/e9/5f85f6c5e20669dbe56b165c67b0260547dea97dba7e187938833d791687/charset_normalizer-3.4.6-cp313-cp313-musllinux_1_2_x86_64.whl", hash = 
"sha256:6cceb5473417d28edd20c6c984ab6fee6c6267d38d906823ebfe20b03d607dc2", size = 208652, upload-time = "2026-03-15T18:51:34.214Z" }, + { url = "https://files.pythonhosted.org/packages/f1/11/897052ea6af56df3eef3ca94edafee410ca699ca0c7b87960ad19932c55e/charset_normalizer-3.4.6-cp313-cp313-win32.whl", hash = "sha256:d7de2637729c67d67cf87614b566626057e95c303bc0a55ffe391f5205e7003d", size = 143940, upload-time = "2026-03-15T18:51:36.15Z" }, + { url = "https://files.pythonhosted.org/packages/a1/5c/724b6b363603e419829f561c854b87ed7c7e31231a7908708ac086cdf3e2/charset_normalizer-3.4.6-cp313-cp313-win_amd64.whl", hash = "sha256:572d7c822caf521f0525ba1bce1a622a0b85cf47ffbdae6c9c19e3b5ac3c4389", size = 154101, upload-time = "2026-03-15T18:51:37.876Z" }, + { url = "https://files.pythonhosted.org/packages/01/a5/7abf15b4c0968e47020f9ca0935fb3274deb87cb288cd187cad92e8cdffd/charset_normalizer-3.4.6-cp313-cp313-win_arm64.whl", hash = "sha256:a4474d924a47185a06411e0064b803c68be044be2d60e50e8bddcc2649957c1f", size = 143109, upload-time = "2026-03-15T18:51:39.565Z" }, + { url = "https://files.pythonhosted.org/packages/25/6f/ffe1e1259f384594063ea1869bfb6be5cdb8bc81020fc36c3636bc8302a1/charset_normalizer-3.4.6-cp314-cp314-macosx_10_15_universal2.whl", hash = "sha256:9cc6e6d9e571d2f863fa77700701dae73ed5f78881efc8b3f9a4398772ff53e8", size = 294458, upload-time = "2026-03-15T18:51:41.134Z" }, + { url = "https://files.pythonhosted.org/packages/56/60/09bb6c13a8c1016c2ed5c6a6488e4ffef506461aa5161662bd7636936fb1/charset_normalizer-3.4.6-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ef5960d965e67165d75b7c7ffc60a83ec5abfc5c11b764ec13ea54fbef8b4421", size = 199277, upload-time = "2026-03-15T18:51:42.953Z" }, + { url = "https://files.pythonhosted.org/packages/00/50/dcfbb72a5138bbefdc3332e8d81a23494bf67998b4b100703fd15fa52d81/charset_normalizer-3.4.6-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = 
"sha256:b3694e3f87f8ac7ce279d4355645b3c878d24d1424581b46282f24b92f5a4ae2", size = 218758, upload-time = "2026-03-15T18:51:44.339Z" }, + { url = "https://files.pythonhosted.org/packages/03/b3/d79a9a191bb75f5aa81f3aaaa387ef29ce7cb7a9e5074ba8ea095cc073c2/charset_normalizer-3.4.6-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:5d11595abf8dd942a77883a39d81433739b287b6aa71620f15164f8096221b30", size = 215299, upload-time = "2026-03-15T18:51:45.871Z" }, + { url = "https://files.pythonhosted.org/packages/76/7e/bc8911719f7084f72fd545f647601ea3532363927f807d296a8c88a62c0d/charset_normalizer-3.4.6-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7bda6eebafd42133efdca535b04ccb338ab29467b3f7bf79569883676fc628db", size = 206811, upload-time = "2026-03-15T18:51:47.308Z" }, + { url = "https://files.pythonhosted.org/packages/e2/40/c430b969d41dda0c465aa36cc7c2c068afb67177bef50905ac371b28ccc7/charset_normalizer-3.4.6-cp314-cp314-manylinux_2_31_armv7l.whl", hash = "sha256:bbc8c8650c6e51041ad1be191742b8b421d05bbd3410f43fa2a00c8db87678e8", size = 193706, upload-time = "2026-03-15T18:51:48.849Z" }, + { url = "https://files.pythonhosted.org/packages/48/15/e35e0590af254f7df984de1323640ef375df5761f615b6225ba8deb9799a/charset_normalizer-3.4.6-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:22c6f0c2fbc31e76c3b8a86fba1a56eda6166e238c29cdd3d14befdb4a4e4815", size = 202706, upload-time = "2026-03-15T18:51:50.257Z" }, + { url = "https://files.pythonhosted.org/packages/5e/bd/f736f7b9cc5e93a18b794a50346bb16fbfd6b37f99e8f306f7951d27c17c/charset_normalizer-3.4.6-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:7edbed096e4a4798710ed6bc75dcaa2a21b68b6c356553ac4823c3658d53743a", size = 202497, upload-time = "2026-03-15T18:51:52.012Z" }, + { url = 
"https://files.pythonhosted.org/packages/9d/ba/2cc9e3e7dfdf7760a6ed8da7446d22536f3d0ce114ac63dee2a5a3599e62/charset_normalizer-3.4.6-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:7f9019c9cb613f084481bd6a100b12e1547cf2efe362d873c2e31e4035a6fa43", size = 193511, upload-time = "2026-03-15T18:51:53.723Z" }, + { url = "https://files.pythonhosted.org/packages/9e/cb/5be49b5f776e5613be07298c80e1b02a2d900f7a7de807230595c85a8b2e/charset_normalizer-3.4.6-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:58c948d0d086229efc484fe2f30c2d382c86720f55cd9bc33591774348ad44e0", size = 220133, upload-time = "2026-03-15T18:51:55.333Z" }, + { url = "https://files.pythonhosted.org/packages/83/43/99f1b5dad345accb322c80c7821071554f791a95ee50c1c90041c157ae99/charset_normalizer-3.4.6-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:419a9d91bd238052642a51938af8ac05da5b3343becde08d5cdeab9046df9ee1", size = 203035, upload-time = "2026-03-15T18:51:56.736Z" }, + { url = "https://files.pythonhosted.org/packages/87/9a/62c2cb6a531483b55dddff1a68b3d891a8b498f3ca555fbcf2978e804d9d/charset_normalizer-3.4.6-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:5273b9f0b5835ff0350c0828faea623c68bfa65b792720c453e22b25cc72930f", size = 216321, upload-time = "2026-03-15T18:51:58.17Z" }, + { url = "https://files.pythonhosted.org/packages/6e/79/94a010ff81e3aec7c293eb82c28f930918e517bc144c9906a060844462eb/charset_normalizer-3.4.6-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:0e901eb1049fdb80f5bd11ed5ea1e498ec423102f7a9b9e4645d5b8204ff2815", size = 208973, upload-time = "2026-03-15T18:51:59.998Z" }, + { url = "https://files.pythonhosted.org/packages/2a/57/4ecff6d4ec8585342f0c71bc03efaa99cb7468f7c91a57b105bcd561cea8/charset_normalizer-3.4.6-cp314-cp314-win32.whl", hash = "sha256:b4ff1d35e8c5bd078be89349b6f3a845128e685e751b6ea1169cf2160b344c4d", size = 144610, upload-time = "2026-03-15T18:52:02.213Z" }, + { url = 
"https://files.pythonhosted.org/packages/80/94/8434a02d9d7f168c25767c64671fead8d599744a05d6a6c877144c754246/charset_normalizer-3.4.6-cp314-cp314-win_amd64.whl", hash = "sha256:74119174722c4349af9708993118581686f343adc1c8c9c007d59be90d077f3f", size = 154962, upload-time = "2026-03-15T18:52:03.658Z" }, + { url = "https://files.pythonhosted.org/packages/46/4c/48f2cdbfd923026503dfd67ccea45c94fd8fe988d9056b468579c66ed62b/charset_normalizer-3.4.6-cp314-cp314-win_arm64.whl", hash = "sha256:e5bcc1a1ae744e0bb59641171ae53743760130600da8db48cbb6e4918e186e4e", size = 143595, upload-time = "2026-03-15T18:52:05.123Z" }, + { url = "https://files.pythonhosted.org/packages/31/93/8878be7569f87b14f1d52032946131bcb6ebbd8af3e20446bc04053dc3f1/charset_normalizer-3.4.6-cp314-cp314t-macosx_10_15_universal2.whl", hash = "sha256:ad8faf8df23f0378c6d527d8b0b15ea4a2e23c89376877c598c4870d1b2c7866", size = 314828, upload-time = "2026-03-15T18:52:06.831Z" }, + { url = "https://files.pythonhosted.org/packages/06/b6/fae511ca98aac69ecc35cde828b0a3d146325dd03d99655ad38fc2cc3293/charset_normalizer-3.4.6-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f5ea69428fa1b49573eef0cc44a1d43bebd45ad0c611eb7d7eac760c7ae771bc", size = 208138, upload-time = "2026-03-15T18:52:08.239Z" }, + { url = "https://files.pythonhosted.org/packages/54/57/64caf6e1bf07274a1e0b7c160a55ee9e8c9ec32c46846ce59b9c333f7008/charset_normalizer-3.4.6-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:06a7e86163334edfc5d20fe104db92fcd666e5a5df0977cb5680a506fe26cc8e", size = 224679, upload-time = "2026-03-15T18:52:10.043Z" }, + { url = "https://files.pythonhosted.org/packages/aa/cb/9ff5a25b9273ef160861b41f6937f86fae18b0792fe0a8e75e06acb08f1d/charset_normalizer-3.4.6-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:e1f6e2f00a6b8edb562826e4632e26d063ac10307e80f7461f7de3ad8ef3f077", size = 
223475, upload-time = "2026-03-15T18:52:11.854Z" }, + { url = "https://files.pythonhosted.org/packages/fc/97/440635fc093b8d7347502a377031f9605a1039c958f3cd18dcacffb37743/charset_normalizer-3.4.6-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:95b52c68d64c1878818687a473a10547b3292e82b6f6fe483808fb1468e2f52f", size = 215230, upload-time = "2026-03-15T18:52:13.325Z" }, + { url = "https://files.pythonhosted.org/packages/cd/24/afff630feb571a13f07c8539fbb502d2ab494019492aaffc78ef41f1d1d0/charset_normalizer-3.4.6-cp314-cp314t-manylinux_2_31_armv7l.whl", hash = "sha256:7504e9b7dc05f99a9bbb4525c67a2c155073b44d720470a148b34166a69c054e", size = 199045, upload-time = "2026-03-15T18:52:14.752Z" }, + { url = "https://files.pythonhosted.org/packages/e5/17/d1399ecdaf7e0498c327433e7eefdd862b41236a7e484355b8e0e5ebd64b/charset_normalizer-3.4.6-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:172985e4ff804a7ad08eebec0a1640ece87ba5041d565fff23c8f99c1f389484", size = 211658, upload-time = "2026-03-15T18:52:16.278Z" }, + { url = "https://files.pythonhosted.org/packages/b5/38/16baa0affb957b3d880e5ac2144caf3f9d7de7bc4a91842e447fbb5e8b67/charset_normalizer-3.4.6-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:4be9f4830ba8741527693848403e2c457c16e499100963ec711b1c6f2049b7c7", size = 210769, upload-time = "2026-03-15T18:52:17.782Z" }, + { url = "https://files.pythonhosted.org/packages/05/34/c531bc6ac4c21da9ddfddb3107be2287188b3ea4b53b70fc58f2a77ac8d8/charset_normalizer-3.4.6-cp314-cp314t-musllinux_1_2_armv7l.whl", hash = "sha256:79090741d842f564b1b2827c0b82d846405b744d31e84f18d7a7b41c20e473ff", size = 201328, upload-time = "2026-03-15T18:52:19.553Z" }, + { url = "https://files.pythonhosted.org/packages/fa/73/a5a1e9ca5f234519c1953608a03fe109c306b97fdfb25f09182babad51a7/charset_normalizer-3.4.6-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = 
"sha256:87725cfb1a4f1f8c2fc9890ae2f42094120f4b44db9360be5d99a4c6b0e03a9e", size = 225302, upload-time = "2026-03-15T18:52:21.043Z" }, + { url = "https://files.pythonhosted.org/packages/ba/f6/cd782923d112d296294dea4bcc7af5a7ae0f86ab79f8fefbda5526b6cfc0/charset_normalizer-3.4.6-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:fcce033e4021347d80ed9c66dcf1e7b1546319834b74445f561d2e2221de5659", size = 211127, upload-time = "2026-03-15T18:52:22.491Z" }, + { url = "https://files.pythonhosted.org/packages/0e/c5/0b6898950627af7d6103a449b22320372c24c6feda91aa24e201a478d161/charset_normalizer-3.4.6-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:ca0276464d148c72defa8bb4390cce01b4a0e425f3b50d1435aa6d7a18107602", size = 222840, upload-time = "2026-03-15T18:52:24.113Z" }, + { url = "https://files.pythonhosted.org/packages/7d/25/c4bba773bef442cbdc06111d40daa3de5050a676fa26e85090fc54dd12f0/charset_normalizer-3.4.6-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:197c1a244a274bb016dd8b79204850144ef77fe81c5b797dc389327adb552407", size = 216890, upload-time = "2026-03-15T18:52:25.541Z" }, + { url = "https://files.pythonhosted.org/packages/35/1a/05dacadb0978da72ee287b0143097db12f2e7e8d3ffc4647da07a383b0b7/charset_normalizer-3.4.6-cp314-cp314t-win32.whl", hash = "sha256:2a24157fa36980478dd1770b585c0f30d19e18f4fb0c47c13aa568f871718579", size = 155379, upload-time = "2026-03-15T18:52:27.05Z" }, + { url = "https://files.pythonhosted.org/packages/5d/7a/d269d834cb3a76291651256f3b9a5945e81d0a49ab9f4a498964e83c0416/charset_normalizer-3.4.6-cp314-cp314t-win_amd64.whl", hash = "sha256:cd5e2801c89992ed8c0a3f0293ae83c159a60d9a5d685005383ef4caca77f2c4", size = 169043, upload-time = "2026-03-15T18:52:28.502Z" }, + { url = "https://files.pythonhosted.org/packages/23/06/28b29fba521a37a8932c6a84192175c34d49f84a6d4773fa63d05f9aff22/charset_normalizer-3.4.6-cp314-cp314t-win_arm64.whl", hash = "sha256:47955475ac79cc504ef2704b192364e51d0d473ad452caedd0002605f780101c", size = 148523, 
upload-time = "2026-03-15T18:52:29.956Z" }, + { url = "https://files.pythonhosted.org/packages/2a/68/687187c7e26cb24ccbd88e5069f5ef00eba804d36dde11d99aad0838ab45/charset_normalizer-3.4.6-py3-none-any.whl", hash = "sha256:947cf925bc916d90adba35a64c82aace04fa39b46b52d4630ece166655905a69", size = 61455, upload-time = "2026-03-15T18:53:23.833Z" }, +] + +[[package]] +name = "ckzg" +version = "2.1.7" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/12/44/fdb579a0d035a1e510511e3c3b9ca98ba2ea240a24f112b1882478bfc2ff/ckzg-2.1.7.tar.gz", hash = "sha256:a0c61c5fd573af0267bcb435ef0f499911289ceb05e863480779ea284a3bb928", size = 1127878, upload-time = "2026-03-11T14:11:13.745Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/09/40/8c2d610066a2efd4048553ff12aa832c916822ec9c888ca924565e520a7b/ckzg-2.1.7-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:126050ffb23b504c34c4c2073c54bd8b42f4a3034798a631c9e85911e26caf47", size = 96386, upload-time = "2026-03-11T14:10:23.532Z" }, + { url = "https://files.pythonhosted.org/packages/29/b6/092bd10eb35e9fe3d316410791d9055039c5dd29caf03c72cc86fce45624/ckzg-2.1.7-cp312-cp312-manylinux2010_i686.manylinux_2_12_i686.manylinux_2_28_i686.whl", hash = "sha256:936b4bffc1a6fa2bf261eb5e673f4fcc59feaf70c6c07aac1b02e3e1f942fdb6", size = 180447, upload-time = "2026-03-11T14:10:24.368Z" }, + { url = "https://files.pythonhosted.org/packages/53/7e/f1c15ec078bee7660a2cafa103c4efdf9686256a348565ef6a1cb70ff1c4/ckzg-2.1.7-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:902c03b689d13684cd8b61c8e1b7a65528fdd5e1ab9d76338ddb2e902b5fd1ea", size = 166242, upload-time = "2026-03-11T14:10:25.671Z" }, + { url = "https://files.pythonhosted.org/packages/bf/de/c22535e16163a836f76d7c3606a6e579a7a02862b4797b832cd6de5f6a1d/ckzg-2.1.7-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = 
"sha256:8e635e5e1f6ff8ffc05d2961ccfc4b3e8c95e50c87d9765b2dfe09e32474c402", size = 176015, upload-time = "2026-03-11T14:10:26.976Z" }, + { url = "https://files.pythonhosted.org/packages/af/4f/56c303eab20d92e5d140f96881c8c7e2eaa05976d6cb887ab574d780d09d/ckzg-2.1.7-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:cbedb5e4732d37c87fe45a2b25891d00f434d4e0f4dd612daa034fe2011e5939", size = 173682, upload-time = "2026-03-11T14:10:27.857Z" }, + { url = "https://files.pythonhosted.org/packages/85/0a/0feb878383e9c83d6dcd760b8de2f3095546cc09b1717ae65cbb47f90b20/ckzg-2.1.7-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:665d0094466b576e390b4a5e1caf199f1165841e99bf7b3cc65117f12ba4ea74", size = 188873, upload-time = "2026-03-11T14:10:28.85Z" }, + { url = "https://files.pythonhosted.org/packages/48/29/c2eb07882465c32478e575334311ad6cea21c5d76d54da6c900dd6cb8e62/ckzg-2.1.7-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:f5d4d1fb20eda15b901fc393a4bfd39b1be661008218f9f0db47d4e143d25d62", size = 183566, upload-time = "2026-03-11T14:10:29.777Z" }, + { url = "https://files.pythonhosted.org/packages/c8/48/4d1f5c470cc6eb73aaba30125e6fb62759ce69bbdb2a74c160f69f601236/ckzg-2.1.7-cp312-cp312-win_amd64.whl", hash = "sha256:b580f65e61f3d89a99bfeeac0e256cf68c63d29df1c1e5e788785085083a303b", size = 99811, upload-time = "2026-03-11T14:10:30.719Z" }, + { url = "https://files.pythonhosted.org/packages/87/32/495600f43a277bcb413d08f23f594dc548ac0d7927ad1ce7db28e58afadd/ckzg-2.1.7-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:e23e10b227209bfae11f6f1f88ff2a8b0a2232248f985321e5e844c9dd7a4c5f", size = 96394, upload-time = "2026-03-11T14:10:31.535Z" }, + { url = "https://files.pythonhosted.org/packages/e4/fe/c3708cfdbc228298c0f5fa4d08ceee7cc01cb7f7d105bfc9ebc68c39060d/ckzg-2.1.7-cp313-cp313-manylinux2010_i686.manylinux_2_12_i686.manylinux_2_28_i686.whl", hash = "sha256:382c015860e7159b1ec5a85642127d4b55f6b36eef5f73d664fc409d26a3b367", size = 180484, upload-time = 
"2026-03-11T14:10:32.418Z" }, + { url = "https://files.pythonhosted.org/packages/28/55/d689769ea0f9b2c2c16d8390f4c3cf7cd7dea0df68542b2a435c341df0b0/ckzg-2.1.7-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6666801e925d2f1d7c045fe943c1265c39b90444f88288735cc1245c4fa8018a", size = 166301, upload-time = "2026-03-11T14:10:33.363Z" }, + { url = "https://files.pythonhosted.org/packages/16/ff/e172b4ae4bef05bf88bb8f27d2b9858b56c9984ad1708eeef82ac787fe7c/ckzg-2.1.7-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:3e823de2fd4103abc4b51512d27aa3e14107e84718e11a596eefcddc6f313b25", size = 176052, upload-time = "2026-03-11T14:10:34.621Z" }, + { url = "https://files.pythonhosted.org/packages/61/0a/dcf28e0126e5a6f8f8b7505b4b5b637ca25e1095272fbee73f8967e3a545/ckzg-2.1.7-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a65c7be0bb72a159c5a4b98cc3c759b868274697de11d8248f5dde32f2400776", size = 173691, upload-time = "2026-03-11T14:10:35.577Z" }, + { url = "https://files.pythonhosted.org/packages/2a/d2/fe404ad0bd79aaeb1e75fb4981d21e37364e59517813f7f085914026a7f6/ckzg-2.1.7-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:62523b275f74f2729fc788d02b26e447dabfd7706ffe8882ee96d776db54b920", size = 188909, upload-time = "2026-03-11T14:10:36.798Z" }, + { url = "https://files.pythonhosted.org/packages/55/d7/ef2d30c88270ab1a0daffa8a0f8453b72035569d3295ad3dcaba9b5250a6/ckzg-2.1.7-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:5d998cd6d0f8e37e969c96315ac8c1e87fcf581cf27ab970bd33e62dc1c43357", size = 183597, upload-time = "2026-03-11T14:10:37.812Z" }, + { url = "https://files.pythonhosted.org/packages/93/77/1e04840c866284bec3489154caec22855829b0c2d028bd1de771655175e3/ckzg-2.1.7-cp313-cp313-win_amd64.whl", hash = "sha256:d48b75fca9e928b2ea288fc079b0522fb91af5742b5eb4f2fdea4fc33a1b7b4e", size = 99808, upload-time = "2026-03-11T14:10:38.701Z" }, + { url = 
"https://files.pythonhosted.org/packages/24/ab/11eb63c520cae074195b05cd644bf45be061b910b5c97abdaae02876a50e/ckzg-2.1.7-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:c19b98f29f4459587e1ec4cce3e2e10963a6974293cf3143d13ce43c30542806", size = 96400, upload-time = "2026-03-11T14:10:39.59Z" }, + { url = "https://files.pythonhosted.org/packages/31/7d/3678cbb22f31a50dd354b9d3efcb9366dd5b97cdddbf270213a66b03ad41/ckzg-2.1.7-cp314-cp314-manylinux2010_i686.manylinux_2_12_i686.manylinux_2_28_i686.whl", hash = "sha256:d31583a24cf8166d81c36f1e424de1f343c1d604dbc8c68d938a908236ae11a3", size = 180492, upload-time = "2026-03-11T14:10:40.766Z" }, + { url = "https://files.pythonhosted.org/packages/48/a5/355f898c75e19ac6426798c28a9767bdc734bebb40c4cd15572f644745ba/ckzg-2.1.7-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:baf6ac696e6a40b33ddb57aa0729d5e39230bd13fa4f1e40fe9236e8920d83fe", size = 166322, upload-time = "2026-03-11T14:10:41.752Z" }, + { url = "https://files.pythonhosted.org/packages/ff/f5/7ffc482dc628c43d9c7a1b19392e1a920ccfd1da8d2e07d7dcc79c3e3bd2/ckzg-2.1.7-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8bbdf89f9327e442415a810beca692729c35664e154a6830296124a5c6f05470", size = 176061, upload-time = "2026-03-11T14:10:42.649Z" }, + { url = "https://files.pythonhosted.org/packages/26/56/f79ee2a177b4522fe47709e9f7e48407cd54a63c3d7bc1ca3002c705b3a7/ckzg-2.1.7-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:716c2dde0a91c0095797b843f78a6425e20a3d8945ecb4f90550b5c681b6be05", size = 173746, upload-time = "2026-03-11T14:10:43.657Z" }, + { url = "https://files.pythonhosted.org/packages/b9/a7/95b160707b22161817245de8b9e44ea143b9a2083b0c625e5e5cd4a2e20a/ckzg-2.1.7-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:2a9f1a05ed44512b80581e47918b1f4546974e8e924ee0e8de84ab32de197326", size = 188923, upload-time = "2026-03-11T14:10:44.635Z" }, + { url = 
"https://files.pythonhosted.org/packages/33/d4/ecfbecf763d42606dba8ab9d7de557d01816afad1e2f3cb1cc7efd6fc254/ckzg-2.1.7-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:42005c188e37c2f65d44f3a2585e89de18e0e229bc667a600d8716808ea2c33b", size = 183607, upload-time = "2026-03-11T14:10:45.846Z" }, + { url = "https://files.pythonhosted.org/packages/4a/72/becb801d8f1224de265f299790f5b2c95e71546ab7ab24a1fd3ebb99519e/ckzg-2.1.7-cp314-cp314-win_amd64.whl", hash = "sha256:14fbc642b1e81893df76a1636fddc169173da5dcdb55fc08a030658cd186150e", size = 102517, upload-time = "2026-03-11T14:10:47.079Z" }, + { url = "https://files.pythonhosted.org/packages/a8/6c/b310f05a6a27baaa53915b43483cc061080e3245c7facaa3c5b3a3cd7c5e/ckzg-2.1.7-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:da1a07e25ecaeb341ad4caf583fdec12c6af1ef3642289bb7dfcad2ca1b73dd3", size = 96609, upload-time = "2026-03-11T14:10:48.019Z" }, + { url = "https://files.pythonhosted.org/packages/0d/96/e1ccbf3f90595d50aa98a8a9c3c1327e6be0575ddbf8292b26b0cfa69b06/ckzg-2.1.7-cp314-cp314t-manylinux2010_i686.manylinux_2_12_i686.manylinux_2_28_i686.whl", hash = "sha256:c657892f93eb70e3295b4f385e25380644c40f8bfebfcd55659f5017257c5b8c", size = 183315, upload-time = "2026-03-11T14:10:49.224Z" }, + { url = "https://files.pythonhosted.org/packages/bc/94/2c7ff1983f82756b29011ad612bc0e1d8f4a1989073c94fd66868bc296d3/ckzg-2.1.7-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:03af4cf053be82c22a893c8ef971d17687182dd2e75bcc2fab320bc27a62b7cb", size = 169457, upload-time = "2026-03-11T14:10:50.601Z" }, + { url = "https://files.pythonhosted.org/packages/98/cd/8c7247181843185ff5e34ebd400594e0fbe2d81e03324f124834f377ea74/ckzg-2.1.7-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6ecd9c44427a0035a8a9cb3dc18b4b3c72347f7be7c9f6866b8eddd6598bf0a9", size = 178841, upload-time = "2026-03-11T14:10:51.598Z" }, + { url = 
"https://files.pythonhosted.org/packages/da/cb/cf2ed4cf461bd2891792317615075745053e2585d8a2cf26a8414ad01983/ckzg-2.1.7-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:16e313e6029e88a564724217dd8eddd6226fbf0a0c07bf65a210bf3512c7b8ad", size = 176489, upload-time = "2026-03-11T14:10:52.905Z" }, + { url = "https://files.pythonhosted.org/packages/50/65/8b7d9cf8883f0df1a15cb20ecec99dfc02fc7bf05bf53509bb270e3a1db0/ckzg-2.1.7-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:8461ec7d69ccb450d4a4d031494a86dc6c15ad54b671967d4a8bdcd8158155b2", size = 191690, upload-time = "2026-03-11T14:10:53.855Z" }, + { url = "https://files.pythonhosted.org/packages/83/56/a1fba1b4a2f90d5fc48d3e62f59f0791c90e85b6ebb600ffeee81ea9cfa6/ckzg-2.1.7-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:53f420a3fa55a92265e23394caa2aac5b0e1e63ee6489d414cafeb0accde9a9e", size = 186204, upload-time = "2026-03-11T14:10:54.821Z" }, + { url = "https://files.pythonhosted.org/packages/c7/a9/a3284a64216f31a886ff216621c6b3806ca7ad7388908f68fcab9007c881/ckzg-2.1.7-cp314-cp314t-win_amd64.whl", hash = "sha256:2cdcc023d842900564d6070e397cab0d04fd393e6af07d60bdd1c97dc3ff09fd", size = 102660, upload-time = "2026-03-11T14:10:55.974Z" }, +] + +[[package]] +name = "colorama" +version = "0.4.6" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/d8/53/6f443c9a4a8358a93a6792e2acffb9d9d5cb0a5cfd8802644b7b1c9a02e4/colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44", size = 27697, upload-time = "2022-10-25T02:36:22.414Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6", size = 25335, upload-time = "2022-10-25T02:36:20.889Z" }, +] + +[[package]] +name = "cytoolz" +version = "1.1.0" +source = { registry = 
"https://pypi.org/simple" } +dependencies = [ + { name = "toolz" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/bd/d4/16916f3dc20a3f5455b63c35dcb260b3716f59ce27a93586804e70e431d5/cytoolz-1.1.0.tar.gz", hash = "sha256:13a7bf254c3c0d28b12e2290b82aed0f0977a4c2a2bf84854fcdc7796a29f3b0", size = 642510, upload-time = "2025-10-19T00:44:56.174Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c6/ec/01426224f7acf60183d3921b25e1a8e71713d3d39cb464d64ac7aace6ea6/cytoolz-1.1.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:99f8e134c9be11649342853ec8c90837af4089fc8ff1e8f9a024a57d1fa08514", size = 1327800, upload-time = "2025-10-19T00:40:48.674Z" }, + { url = "https://files.pythonhosted.org/packages/b4/07/e07e8fedd332ac9626ad58bea31416dda19bfd14310731fa38b16a97e15f/cytoolz-1.1.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:0a6f44cf9319c30feb9a50aa513d777ef51efec16f31c404409e7deb8063df64", size = 997118, upload-time = "2025-10-19T00:40:50.919Z" }, + { url = "https://files.pythonhosted.org/packages/ab/72/c0f766d63ed2f9ea8dc8e1628d385d99b41fb834ce17ac3669e3f91e115d/cytoolz-1.1.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:945580dc158c557172fca899a35a99a16fbcebf6db0c77cb6621084bc82189f9", size = 991169, upload-time = "2025-10-19T00:40:52.887Z" }, + { url = "https://files.pythonhosted.org/packages/df/4b/1f757353d1bf33e56a7391ecc9bc49c1e529803b93a9d2f67fe5f92906fe/cytoolz-1.1.0-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:257905ec050d04f2f856854620d1e25556fd735064cebd81b460f54939b9f9d5", size = 2700680, upload-time = "2025-10-19T00:40:54.597Z" }, + { url = "https://files.pythonhosted.org/packages/25/73/9b25bb7ed8d419b9d6ff2ae0b3d06694de79a3f98f5169a1293ff7ad3a3f/cytoolz-1.1.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:82779049f352fb3ab5e8c993ab45edbb6e02efb1f17f0b50f4972c706cc51d76", size = 2824951, upload-time = 
"2025-10-19T00:40:56.137Z" }, + { url = "https://files.pythonhosted.org/packages/0c/93/9c787f7c909e75670fff467f2504725d06d8c3f51d6dfe22c55a08c8ccd4/cytoolz-1.1.0-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:7d3e405e435320e08c5a1633afaf285a392e2d9cef35c925d91e2a31dfd7a688", size = 2679635, upload-time = "2025-10-19T00:40:57.799Z" }, + { url = "https://files.pythonhosted.org/packages/50/aa/9ee92c302cccf7a41a7311b325b51ebeff25d36c1f82bdc1bbe3f58dc947/cytoolz-1.1.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:923df8f5591e0d20543060c29909c149ab1963a7267037b39eee03a83dbc50a8", size = 2938352, upload-time = "2025-10-19T00:40:59.49Z" }, + { url = "https://files.pythonhosted.org/packages/6a/a3/3b58c5c1692c3bacd65640d0d5c7267a7ebb76204f7507aec29de7063d2f/cytoolz-1.1.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:25db9e4862f22ea0ae2e56c8bec9fc9fd756b655ae13e8c7b5625d7ed1c582d4", size = 3022121, upload-time = "2025-10-19T00:41:01.209Z" }, + { url = "https://files.pythonhosted.org/packages/e1/93/c647bc3334355088c57351a536c2d4a83dd45f7de591fab383975e45bff9/cytoolz-1.1.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c7a98deb11ccd8e5d9f9441ef2ff3352aab52226a2b7d04756caaa53cd612363", size = 2857656, upload-time = "2025-10-19T00:41:03.456Z" }, + { url = "https://files.pythonhosted.org/packages/b2/c2/43fea146bf4141deea959e19dcddf268c5ed759dec5c2ed4a6941d711933/cytoolz-1.1.0-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:dce4ee9fc99104bc77efdea80f32ca5a650cd653bcc8a1d984a931153d3d9b58", size = 2551284, upload-time = "2025-10-19T00:41:05.347Z" }, + { url = "https://files.pythonhosted.org/packages/6f/df/cdc7a81ce5cfcde7ef523143d545635fc37e80ccacce140ae58483a21da3/cytoolz-1.1.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = 
"sha256:80d6da158f7d20c15819701bbda1c041f0944ede2f564f5c739b1bc80a9ffb8b", size = 2721673, upload-time = "2025-10-19T00:41:07.528Z" }, + { url = "https://files.pythonhosted.org/packages/45/be/f8524bb9ad8812ad375e61238dcaa3177628234d1b908ad0b74e3657cafd/cytoolz-1.1.0-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:3b5c5a192abda123ad45ef716ec9082b4cf7d95e9ada8291c5c2cc5558be858b", size = 2722884, upload-time = "2025-10-19T00:41:09.698Z" }, + { url = "https://files.pythonhosted.org/packages/23/e6/6bb8e4f9c267ad42d1ff77b6d2e4984665505afae50a216290e1d7311431/cytoolz-1.1.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:5b399ce7d967b1cb6280250818b786be652aa8ddffd3c0bb5c48c6220d945ab5", size = 2685486, upload-time = "2025-10-19T00:41:11.349Z" }, + { url = "https://files.pythonhosted.org/packages/d7/dd/88619f9c8d2b682562c0c886bbb7c35720cb83fda2ac9a41bdd14073d9bd/cytoolz-1.1.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:e7e29a1a03f00b4322196cfe8e2c38da9a6c8d573566052c586df83aacc5663c", size = 2839661, upload-time = "2025-10-19T00:41:13.053Z" }, + { url = "https://files.pythonhosted.org/packages/b8/8d/4478ebf471ee78dd496d254dc0f4ad729cd8e6ba8257de4f0a98a2838ef2/cytoolz-1.1.0-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:5291b117d71652a817ec164e7011f18e6a51f8a352cc9a70ed5b976c51102fda", size = 2547095, upload-time = "2025-10-19T00:41:16.054Z" }, + { url = "https://files.pythonhosted.org/packages/e6/68/f1dea33367b0b3f64e199c230a14a6b6f243c189020effafd31e970ca527/cytoolz-1.1.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:8caef62f846a9011676c51bda9189ae394cdd6bb17f2946ecaedc23243268320", size = 2870901, upload-time = "2025-10-19T00:41:17.727Z" }, + { url = "https://files.pythonhosted.org/packages/4a/9a/33591c09dfe799b8fb692cf2ad383e2c41ab6593cc960b00d1fc8a145655/cytoolz-1.1.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:de425c5a8e3be7bb3a195e19191d28d9eb3c2038046064a92edc4505033ec9cb", size = 2765422, upload-time = 
"2025-10-19T00:41:20.075Z" }, + { url = "https://files.pythonhosted.org/packages/60/2b/a8aa233c9416df87f004e57ae4280bd5e1f389b4943d179f01020c6ec629/cytoolz-1.1.0-cp312-cp312-win32.whl", hash = "sha256:296440a870e8d1f2e1d1edf98f60f1532b9d3ab8dfbd4b25ec08cd76311e79e5", size = 901933, upload-time = "2025-10-19T00:41:21.646Z" }, + { url = "https://files.pythonhosted.org/packages/ad/33/4c9bdf8390dc01d2617c7f11930697157164a52259b6818ddfa2f94f89f4/cytoolz-1.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:07156987f224c6dac59aa18fb8bf91e1412f5463961862716a3381bf429c8699", size = 947989, upload-time = "2025-10-19T00:41:23.288Z" }, + { url = "https://files.pythonhosted.org/packages/35/ac/6e2708835875f5acb52318462ed296bf94ed0cb8c7cb70e62fbd03f709e3/cytoolz-1.1.0-cp312-cp312-win_arm64.whl", hash = "sha256:23e616b38f5b3160c7bb45b0f84a8f3deb4bd26b29fb2dfc716f241c738e27b8", size = 903913, upload-time = "2025-10-19T00:41:24.992Z" }, + { url = "https://files.pythonhosted.org/packages/71/4a/b3ddb3ee44fe0045e95dd973746f93f033b6f92cce1fc3cbbe24b329943c/cytoolz-1.1.0-cp313-cp313-ios_13_0_arm64_iphoneos.whl", hash = "sha256:76c9b58555300be6dde87a41faf1f97966d79b9a678b7a526fcff75d28ef4945", size = 976728, upload-time = "2025-10-19T00:41:26.5Z" }, + { url = "https://files.pythonhosted.org/packages/42/21/a3681434aa425875dd828bb515924b0f12c37a55c7d2bc5c0c5de3aeb0b4/cytoolz-1.1.0-cp313-cp313-ios_13_0_arm64_iphonesimulator.whl", hash = "sha256:d1d638b10d3144795655e9395566ce35807df09219fd7cacd9e6acbdef67946a", size = 986057, upload-time = "2025-10-19T00:41:28.911Z" }, + { url = "https://files.pythonhosted.org/packages/d9/cb/efc1b29e211e0670a6953222afaac84dcbba5cb940b130c0e49858978040/cytoolz-1.1.0-cp313-cp313-ios_13_0_x86_64_iphonesimulator.whl", hash = "sha256:26801c1a165e84786a99e03c9c9973356caaca002d66727b761fb1042878ef06", size = 992632, upload-time = "2025-10-19T00:41:30.612Z" }, + { url = 
"https://files.pythonhosted.org/packages/be/b0/e50621d21e939338c97faab651f58ea7fa32101226a91de79ecfb89d71e1/cytoolz-1.1.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:2a9a464542912d3272f6dccc5142df057c71c6a5cbd30439389a732df401afb7", size = 1317534, upload-time = "2025-10-19T00:41:32.625Z" }, + { url = "https://files.pythonhosted.org/packages/0d/6b/25aa9739b0235a5bc4c1ea293186bc6822a4c6607acfe1422423287e7400/cytoolz-1.1.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:ed6104fa942aa5784bf54f339563de637557e3443b105760bc4de8f16a7fc79b", size = 992336, upload-time = "2025-10-19T00:41:34.073Z" }, + { url = "https://files.pythonhosted.org/packages/e1/53/5f4deb0ff958805309d135d899c764364c1e8a632ce4994bd7c45fb98df2/cytoolz-1.1.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:56161f0ab60dc4159ec343509abaf809dc88e85c7e420e354442c62e3e7cbb77", size = 986118, upload-time = "2025-10-19T00:41:35.7Z" }, + { url = "https://files.pythonhosted.org/packages/1c/e3/f6255b76c8cc0debbe1c0779130777dc0434da6d9b28a90d9f76f8cb67cd/cytoolz-1.1.0-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:832bd36cc9123535f1945acf6921f8a2a15acc19cfe4065b1c9b985a28671886", size = 2679563, upload-time = "2025-10-19T00:41:37.926Z" }, + { url = "https://files.pythonhosted.org/packages/59/8a/acc6e39a84e930522b965586ad3a36694f9bf247b23188ee0eb47b1c9ed1/cytoolz-1.1.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1842636b6e034f229bf084c2bcdcfd36c8437e752eefd2c74ce9e2f10415cb6e", size = 2813020, upload-time = "2025-10-19T00:41:39.935Z" }, + { url = "https://files.pythonhosted.org/packages/db/f5/0083608286ad1716eda7c41f868e85ac549f6fd6b7646993109fa0bdfd98/cytoolz-1.1.0-cp313-cp313-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:823df012ab90d2f2a0f92fea453528539bf71ac1879e518524cd0c86aa6df7b9", size = 2669312, upload-time = "2025-10-19T00:41:41.55Z" }, + { url = 
"https://files.pythonhosted.org/packages/47/a8/d16080b575520fe5da00cede1ece4e0a4180ec23f88dcdc6a2f5a90a7f7f/cytoolz-1.1.0-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:2f1fcf9e7e7b3487883ff3f815abc35b89dcc45c4cf81c72b7ee457aa72d197b", size = 2922147, upload-time = "2025-10-19T00:41:43.252Z" }, + { url = "https://files.pythonhosted.org/packages/7e/bc/716c9c1243701e58cad511eb3937fd550e645293c5ed1907639c5d66f194/cytoolz-1.1.0-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:4cdb3fa1772116827f263f25b0cdd44c663b6701346a56411960534a06c082de", size = 2981602, upload-time = "2025-10-19T00:41:45.354Z" }, + { url = "https://files.pythonhosted.org/packages/14/bc/571b232996846b27f4ac0c957dc8bf60261e9b4d0d01c8d955e82329544e/cytoolz-1.1.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d1b5c95041741b81430454db65183e133976f45ac3c03454cfa8147952568529", size = 2830103, upload-time = "2025-10-19T00:41:47.959Z" }, + { url = "https://files.pythonhosted.org/packages/5b/55/c594afb46ecd78e4b7e1fb92c947ed041807875661ceda73baaf61baba4f/cytoolz-1.1.0-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:b2079fd9f1a65f4c61e6278c8a6d4f85edf30c606df8d5b32f1add88cbbe2286", size = 2533802, upload-time = "2025-10-19T00:41:49.683Z" }, + { url = "https://files.pythonhosted.org/packages/93/83/1edcf95832555a78fc43b975f3ebe8ceadcc9664dd47fd33747a14df5069/cytoolz-1.1.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a92a320d72bef1c7e2d4c6d875125cf57fc38be45feb3fac1bfa64ea401f54a4", size = 2706071, upload-time = "2025-10-19T00:41:51.386Z" }, + { url = "https://files.pythonhosted.org/packages/e2/df/035a408df87f25cfe3611557818b250126cd2281b2104cd88395de205583/cytoolz-1.1.0-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:06d1c79aa51e6a92a90b0e456ebce2288f03dd6a76c7f582bfaa3eda7692e8a5", size = 2707575, upload-time = 
"2025-10-19T00:41:53.305Z" }, + { url = "https://files.pythonhosted.org/packages/7a/a4/ef78e13e16e93bf695a9331321d75fbc834a088d941f1c19e6b63314e257/cytoolz-1.1.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:e1d7be25f6971e986a52b6d3a0da28e1941850985417c35528f6823aef2cfec5", size = 2660486, upload-time = "2025-10-19T00:41:55.542Z" }, + { url = "https://files.pythonhosted.org/packages/30/7a/2c3d60682b26058d435416c4e90d4a94db854de5be944dfd069ed1be648a/cytoolz-1.1.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:964b248edc31efc50a65e9eaa0c845718503823439d2fa5f8d2c7e974c2b5409", size = 2819605, upload-time = "2025-10-19T00:41:58.257Z" }, + { url = "https://files.pythonhosted.org/packages/45/92/19b722a1d83cc443fbc0c16e0dc376f8a451437890d3d9ee370358cf0709/cytoolz-1.1.0-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:c9ff2b3c57c79b65cb5be14a18c6fd4a06d5036fb3f33e973a9f70e9ac13ca28", size = 2533559, upload-time = "2025-10-19T00:42:00.324Z" }, + { url = "https://files.pythonhosted.org/packages/1d/15/fa3b7891da51115204416f14192081d3dea0eaee091f123fdc1347de8dd1/cytoolz-1.1.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:22290b73086af600042d99f5ce52a43d4ad9872c382610413176e19fc1d4fd2d", size = 2839171, upload-time = "2025-10-19T00:42:01.881Z" }, + { url = "https://files.pythonhosted.org/packages/46/40/d3519d5cd86eebebf1e8b7174ec32dfb6ecec67b48b0cfb92bf226659b5a/cytoolz-1.1.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a2ade74fccd080ea793382968913ee38d7a35c921df435bbf0a6aeecf0d17574", size = 2743379, upload-time = "2025-10-19T00:42:03.809Z" }, + { url = "https://files.pythonhosted.org/packages/93/e2/a9e7511f0a13fdbefa5bf73cf8e4763878140de9453fd3e50d6ac57b6be7/cytoolz-1.1.0-cp313-cp313-win32.whl", hash = "sha256:db5dbcfda1c00e937426cbf9bdc63c24ebbc358c3263bfcbc1ab4a88dc52aa8e", size = 900844, upload-time = "2025-10-19T00:42:05.967Z" }, + { url = 
"https://files.pythonhosted.org/packages/d6/a4/fb7eb403c6a4c81e5a30363f34a71adcc8bf5292dc8ea32e2440aa5668f2/cytoolz-1.1.0-cp313-cp313-win_amd64.whl", hash = "sha256:9e2d3fe3b45c3eb7233746f7aca37789be3dceec3e07dcc406d3e045ea0f7bdc", size = 946461, upload-time = "2025-10-19T00:42:07.983Z" }, + { url = "https://files.pythonhosted.org/packages/93/bb/1c8c33d353548d240bc6e8677ee8c3560ce5fa2f084e928facf7c35a6dcf/cytoolz-1.1.0-cp313-cp313-win_arm64.whl", hash = "sha256:32c559f95ff44a9ebcbd934acaa1e6dc8f3e6ffce4762a79a88528064873d6d5", size = 902673, upload-time = "2025-10-19T00:42:09.982Z" }, + { url = "https://files.pythonhosted.org/packages/c4/ba/4a53acc60f59030fcaf48c7766e3c4c81bd997379425aa45b129396557b5/cytoolz-1.1.0-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:9e2cd93b28f667c5870a070ab2b8bb4397470a85c4b204f2454b0ad001cd1ca3", size = 1372336, upload-time = "2025-10-19T00:42:12.104Z" }, + { url = "https://files.pythonhosted.org/packages/ac/90/f28fd8ad8319d8f5c8da69a2c29b8cf52a6d2c0161602d92b366d58926ab/cytoolz-1.1.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:f494124e141a9361f31d79875fe7ea459a3be2b9dadd90480427c0c52a0943d4", size = 1011930, upload-time = "2025-10-19T00:42:14.231Z" }, + { url = "https://files.pythonhosted.org/packages/c9/95/4561c4e0ad1c944f7673d6d916405d68080f10552cfc5d69a1cf2475a9a1/cytoolz-1.1.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:53a3262bf221f19437ed544bf8c0e1980c81ac8e2a53d87a9bc075dba943d36f", size = 1020610, upload-time = "2025-10-19T00:42:15.877Z" }, + { url = "https://files.pythonhosted.org/packages/c3/14/b2e1ffa4995ec36e1372e243411ff36325e4e6d7ffa34eb4098f5357d176/cytoolz-1.1.0-cp313-cp313t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:47663e57d3f3f124921f38055e86a1022d0844c444ede2e8f090d3bbf80deb65", size = 2917327, upload-time = "2025-10-19T00:42:17.706Z" }, + { url = 
"https://files.pythonhosted.org/packages/4a/29/7cab6c609b4514ac84cca2f7dca6c509977a8fc16d27c3a50e97f105fa6a/cytoolz-1.1.0-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a5a8755c4104ee4e3d5ba434c543b5f85fdee6a1f1df33d93f518294da793a60", size = 3108951, upload-time = "2025-10-19T00:42:19.363Z" }, + { url = "https://files.pythonhosted.org/packages/9a/71/1d1103b819458679277206ad07d78ca6b31c4bb88d6463fd193e19bfb270/cytoolz-1.1.0-cp313-cp313t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:4d96ff3d381423af1b105295f97de86d1db51732c9566eb37378bab6670c5010", size = 2807149, upload-time = "2025-10-19T00:42:20.964Z" }, + { url = "https://files.pythonhosted.org/packages/1a/d4/3d83a05a21e7d2ed2b9e6daf489999c29934b005de9190272b8a2e3735d0/cytoolz-1.1.0-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:0ec96b3d537cdf47d4e76ded199f7440715f4c71029b45445cff92c1248808c2", size = 3111608, upload-time = "2025-10-19T00:42:22.684Z" }, + { url = "https://files.pythonhosted.org/packages/51/88/96f68354c3d4af68de41f0db4fe41a23b96a50a4a416636cea325490cfeb/cytoolz-1.1.0-cp313-cp313t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:208e2f2ef90a32b0acbff3303d90d89b13570a228d491d2e622a7883a3c68148", size = 3179373, upload-time = "2025-10-19T00:42:24.395Z" }, + { url = "https://files.pythonhosted.org/packages/ce/50/ed87a5cd8e6f27ffbb64c39e9730e18ec66c37631db2888ae711909f10c9/cytoolz-1.1.0-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0d416a81bb0bd517558668e49d30a7475b5445f9bbafaab7dcf066f1e9adba36", size = 3003120, upload-time = "2025-10-19T00:42:26.18Z" }, + { url = "https://files.pythonhosted.org/packages/d3/a7/acde155b050d6eaa8e9c7845c98fc5fb28501568e78e83ebbf44f8855274/cytoolz-1.1.0-cp313-cp313t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = 
"sha256:f32e94c91ffe49af04835ee713ebd8e005c85ebe83e7e1fdcc00f27164c2d636", size = 2703225, upload-time = "2025-10-19T00:42:27.93Z" }, + { url = "https://files.pythonhosted.org/packages/1b/b6/9d518597c5bdea626b61101e8d2ff94124787a42259dafd9f5fc396f346a/cytoolz-1.1.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:15d0c6405efc040499c46df44056a5c382f551a7624a41cf3e4c84a96b988a15", size = 2956033, upload-time = "2025-10-19T00:42:29.993Z" }, + { url = "https://files.pythonhosted.org/packages/89/7a/93e5f860926165538c85e1c5e1670ad3424f158df810f8ccd269da652138/cytoolz-1.1.0-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:bf069c5381d757debae891401b88b3a346ba3a28ca45ba9251103b282463fad8", size = 2862950, upload-time = "2025-10-19T00:42:31.803Z" }, + { url = "https://files.pythonhosted.org/packages/76/e6/99d6af00487bedc27597b54c9fcbfd5c833a69c6b7a9b9f0fff777bfc7aa/cytoolz-1.1.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:7d5cf15892e63411ec1bd67deff0e84317d974e6ab2cdfefdd4a7cea2989df66", size = 2861757, upload-time = "2025-10-19T00:42:33.625Z" }, + { url = "https://files.pythonhosted.org/packages/71/ca/adfa1fb7949478135a37755cb8e88c20cd6b75c22a05f1128f05f3ab2c60/cytoolz-1.1.0-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:3e3872c21170f8341656f8692f8939e8800dcee6549ad2474d4c817bdefd62cd", size = 2979049, upload-time = "2025-10-19T00:42:35.377Z" }, + { url = "https://files.pythonhosted.org/packages/70/4c/7bf47a03a4497d500bc73d4204e2d907771a017fa4457741b2a1d7c09319/cytoolz-1.1.0-cp313-cp313t-musllinux_1_2_riscv64.whl", hash = "sha256:b9ddeff8e8fd65eb1fcefa61018100b2b627e759ea6ad275d2e2a93ffac147bf", size = 2699492, upload-time = "2025-10-19T00:42:37.133Z" }, + { url = "https://files.pythonhosted.org/packages/7e/e7/3d034b0e4817314f07aa465d5864e9b8df9d25cb260a53dd84583e491558/cytoolz-1.1.0-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:02feeeda93e1fa3b33414eb57c2b0aefd1db8f558dd33fdfcce664a0f86056e4", size = 2995646, upload-time = 
"2025-10-19T00:42:38.912Z" }, + { url = "https://files.pythonhosted.org/packages/c1/62/be357181c71648d9fe1d1ce91cd42c63457dcf3c158e144416fd51dced83/cytoolz-1.1.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:d08154ad45349162b6c37f12d5d1b2e6eef338e657b85e1621e4e6a4a69d64cb", size = 2919481, upload-time = "2025-10-19T00:42:40.85Z" }, + { url = "https://files.pythonhosted.org/packages/62/d5/bf5434fde726c4f80cb99912b2d8e0afa1587557e2a2d7e0315eb942f2de/cytoolz-1.1.0-cp313-cp313t-win32.whl", hash = "sha256:10ae4718a056948d73ca3e1bb9ab1f95f897ec1e362f829b9d37cc29ab566c60", size = 951595, upload-time = "2025-10-19T00:42:42.877Z" }, + { url = "https://files.pythonhosted.org/packages/64/29/39c161e9204a9715321ddea698cbd0abc317e78522c7c642363c20589e71/cytoolz-1.1.0-cp313-cp313t-win_amd64.whl", hash = "sha256:1bb77bc6197e5cb19784b6a42bb0f8427e81737a630d9d7dda62ed31733f9e6c", size = 1004445, upload-time = "2025-10-19T00:42:44.855Z" }, + { url = "https://files.pythonhosted.org/packages/e2/5a/7cbff5e9a689f558cb0bdf277f9562b2ac51acf7cd15e055b8c3efb0e1ef/cytoolz-1.1.0-cp313-cp313t-win_arm64.whl", hash = "sha256:563dda652c6ff52d215704fbe6b491879b78d7bbbb3a9524ec8e763483cb459f", size = 926207, upload-time = "2025-10-19T00:42:46.456Z" }, + { url = "https://files.pythonhosted.org/packages/b7/e8/297a85ba700f437c01eba962428e6ab4572f6c3e68e8ff442ce5c9d3a496/cytoolz-1.1.0-cp314-cp314-ios_13_0_arm64_iphoneos.whl", hash = "sha256:d542cee7c7882d2a914a33dec4d3600416fb336734df979473249d4c53d207a1", size = 980613, upload-time = "2025-10-19T00:42:47.988Z" }, + { url = "https://files.pythonhosted.org/packages/e8/d7/2b02c9d18e9cc263a0e22690f78080809f1eafe72f26b29ccc115d3bf5c8/cytoolz-1.1.0-cp314-cp314-ios_13_0_arm64_iphonesimulator.whl", hash = "sha256:31922849b701b0f24bb62e56eb2488dcd3aa6ae3057694bd6b3b7c4c2bc27c2f", size = 990476, upload-time = "2025-10-19T00:42:49.653Z" }, + { url = 
"https://files.pythonhosted.org/packages/89/26/b6b159d2929310fca0eff8a4989cd4b1ecbdf7c46fdff46c7a20fcae55c8/cytoolz-1.1.0-cp314-cp314-ios_13_0_x86_64_iphonesimulator.whl", hash = "sha256:e68308d32afd31943314735c1335e4ab5696110e96b405f6bdb8f2a8dc771a16", size = 992712, upload-time = "2025-10-19T00:42:51.306Z" }, + { url = "https://files.pythonhosted.org/packages/42/a0/f7c572aa151ed466b0fce4a327c3cc916d3ef3c82e341be59ea4b9bee9e4/cytoolz-1.1.0-cp314-cp314-macosx_10_15_universal2.whl", hash = "sha256:fc4bb48b3b866e1867f7c6411a4229e5b44be3989060663713e10efc24c9bd5f", size = 1322596, upload-time = "2025-10-19T00:42:52.978Z" }, + { url = "https://files.pythonhosted.org/packages/72/7c/a55d035e20b77b6725e85c8f1a418b3a4c23967288b8b0c2d1a40f158cbe/cytoolz-1.1.0-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:456f77207d1445025d7ef262b8370a05492dcb1490cb428b0f3bf1bd744a89b0", size = 992825, upload-time = "2025-10-19T00:42:55.026Z" }, + { url = "https://files.pythonhosted.org/packages/03/af/39d2d3db322136e12e9336a1f13bab51eab88b386bfb11f91d3faff8ba34/cytoolz-1.1.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:174ebc71ebb20a9baeffce6ee07ee2cd913754325c93f99d767380d8317930f7", size = 990525, upload-time = "2025-10-19T00:42:56.666Z" }, + { url = "https://files.pythonhosted.org/packages/a6/bd/65d7a869d307f9b10ad45c2c1cbb40b81a8d0ed1138fa17fd904f5c83298/cytoolz-1.1.0-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:8b3604fef602bcd53415055a4f68468339192fd17be39e687ae24f476d23d56e", size = 2672409, upload-time = "2025-10-19T00:42:58.81Z" }, + { url = "https://files.pythonhosted.org/packages/2d/fb/74dfd844bfd67e810bd36e8e3903a143035447245828e7fcd7c81351d775/cytoolz-1.1.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3604b959a01f64c366e7d10ec7634d5f5cfe10301e27a8f090f6eb3b2a628a18", size = 2808477, upload-time = "2025-10-19T00:43:00.577Z" }, + { url = 
"https://files.pythonhosted.org/packages/d6/1f/587686c43e31c19241ec317da66438d093523921ea7749bbc65558a30df9/cytoolz-1.1.0-cp314-cp314-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:6db2127a3c1bc2f59f08010d2ae53a760771a9de2f67423ad8d400e9ba4276e8", size = 2636881, upload-time = "2025-10-19T00:43:02.24Z" }, + { url = "https://files.pythonhosted.org/packages/bc/6d/90468cd34f77cb38a11af52c4dc6199efcc97a486395a21bef72e9b7602e/cytoolz-1.1.0-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:56584745ac647993a016a21bc76399113b7595e312f8d0a1b140c9fcf9b58a27", size = 2937315, upload-time = "2025-10-19T00:43:03.954Z" }, + { url = "https://files.pythonhosted.org/packages/d9/50/7b92cd78c613b92e3509e6291d3fb7e0d72ebda999a8df806a96c40ca9ab/cytoolz-1.1.0-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:db2c4c3a7f7bd7e03bb1a236a125c8feb86c75802f4ecda6ecfaf946610b2930", size = 2959988, upload-time = "2025-10-19T00:43:05.758Z" }, + { url = "https://files.pythonhosted.org/packages/44/d5/34b5a28a8d9bb329f984b4c2259407ca3f501d1abeb01bacea07937d85d1/cytoolz-1.1.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:48cb8a692111a285d2b9acd16d185428176bfbffa8a7c274308525fccd01dd42", size = 2795116, upload-time = "2025-10-19T00:43:07.411Z" }, + { url = "https://files.pythonhosted.org/packages/f5/d9/5dd829e33273ec03bdc3c812e6c3281987ae2c5c91645582f6c331544a64/cytoolz-1.1.0-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:d2f344ba5eb17dcf38ee37fdde726f69053f54927db8f8a1bed6ac61e5b1890d", size = 2535390, upload-time = "2025-10-19T00:43:09.104Z" }, + { url = "https://files.pythonhosted.org/packages/87/1f/7f9c58068a8eec2183110df051bc6b69dd621143f84473eeb6dc1b32905a/cytoolz-1.1.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = 
"sha256:abf76b1c1abd031f098f293b6d90ee08bdaa45f8b5678430e331d991b82684b1", size = 2704834, upload-time = "2025-10-19T00:43:10.942Z" }, + { url = "https://files.pythonhosted.org/packages/d2/90/667def5665333575d01a65fe3ec0ca31b897895f6e3bc1a42d6ea3659369/cytoolz-1.1.0-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:ddf9a38a5b686091265ff45b53d142e44a538cd6c2e70610d3bc6be094219032", size = 2658441, upload-time = "2025-10-19T00:43:12.655Z" }, + { url = "https://files.pythonhosted.org/packages/23/79/6615f9a14960bd29ac98b823777b6589357833f65cf1a11b5abc1587c120/cytoolz-1.1.0-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:946786755274f07bb2be0400f28adb31d7d85a7c7001873c0a8e24a503428fb3", size = 2654766, upload-time = "2025-10-19T00:43:14.325Z" }, + { url = "https://files.pythonhosted.org/packages/b0/99/be59c6e0ae02153ef10ae1ff0f380fb19d973c651b50cf829a731f6c9e79/cytoolz-1.1.0-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:d5b8f78b9fed79cf185ad4ddec099abeef45951bdcb416c5835ba05f0a1242c7", size = 2827649, upload-time = "2025-10-19T00:43:16.132Z" }, + { url = "https://files.pythonhosted.org/packages/19/b7/854ddcf9f9618844108677c20d48f4611b5c636956adea0f0e85e027608f/cytoolz-1.1.0-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:fccde6efefdbc02e676ccb352a2ccc8a8e929f59a1c6d3d60bb78e923a49ca44", size = 2533456, upload-time = "2025-10-19T00:43:17.764Z" }, + { url = "https://files.pythonhosted.org/packages/45/66/bfe6fbb2bdcf03c8377c8c2f542576e15f3340c905a09d78a6cb3badd39a/cytoolz-1.1.0-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:717b7775313da5f51b0fbf50d865aa9c39cb241bd4cb605df3cf2246d6567397", size = 2826455, upload-time = "2025-10-19T00:43:19.561Z" }, + { url = "https://files.pythonhosted.org/packages/c3/0c/cce4047bd927e95f59e73319c02c9bc86bd3d76392e0eb9e41a1147a479c/cytoolz-1.1.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:5158744a09d0e0e4a4f82225e3a3c4ebf38f9ae74467aaa905467270e52f2794", size = 2714897, upload-time = 
"2025-10-19T00:43:21.291Z" }, + { url = "https://files.pythonhosted.org/packages/ac/9a/061323bb289b565802bad14fb7ab59fcd8713105df142bcf4dd9ff64f8ac/cytoolz-1.1.0-cp314-cp314-win32.whl", hash = "sha256:1ed534bdbbf063b2bb28fca7d0f6723a3e5a72b086e7c7fe6d74ae8c3e4d00e2", size = 901490, upload-time = "2025-10-19T00:43:22.895Z" }, + { url = "https://files.pythonhosted.org/packages/a3/20/1f3a733d710d2a25d6f10b463bef55ada52fe6392a5d233c8d770191f48a/cytoolz-1.1.0-cp314-cp314-win_amd64.whl", hash = "sha256:472c1c9a085f5ad973ec0ad7f0b9ba0969faea6f96c9e397f6293d386f3a25ec", size = 946730, upload-time = "2025-10-19T00:43:24.838Z" }, + { url = "https://files.pythonhosted.org/packages/f2/22/2d657db4a5d1c10a152061800f812caba9ef20d7bd2406f51a5fd800c180/cytoolz-1.1.0-cp314-cp314-win_arm64.whl", hash = "sha256:a7ad7ca3386fa86bd301be3fa36e7f0acb024f412f665937955acfc8eb42deff", size = 905722, upload-time = "2025-10-19T00:43:26.439Z" }, + { url = "https://files.pythonhosted.org/packages/19/97/b4a8c76796a9a8b9bc90c7992840fa1589a1af8e0426562dea4ce9b384a7/cytoolz-1.1.0-cp314-cp314t-macosx_10_15_universal2.whl", hash = "sha256:64b63ed4b71b1ba813300ad0f06b8aff19a12cf51116e0e4f1ed837cea4debcf", size = 1372606, upload-time = "2025-10-19T00:43:28.491Z" }, + { url = "https://files.pythonhosted.org/packages/08/d4/a1bb1a32b454a2d650db8374ff3bf875ba0fc1c36e6446ec02a83b9140a1/cytoolz-1.1.0-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:a60ba6f2ed9eb0003a737e1ee1e9fa2258e749da6477946008d4324efa25149f", size = 1012189, upload-time = "2025-10-19T00:43:30.177Z" }, + { url = "https://files.pythonhosted.org/packages/21/4b/2f5cbbd81588918ee7dd70cffb66731608f578a9b72166aafa991071af7d/cytoolz-1.1.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:1aa58e2434d732241f7f051e6f17657e969a89971025e24578b5cbc6f1346485", size = 1020624, upload-time = "2025-10-19T00:43:31.712Z" }, + { url = 
"https://files.pythonhosted.org/packages/f5/99/c4954dd86cd593cd776a038b36795a259b8b5c12cbab6363edf5f6d9c909/cytoolz-1.1.0-cp314-cp314t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:6965af3fc7214645970e312deb9bd35a213a1eaabcfef4f39115e60bf2f76867", size = 2917016, upload-time = "2025-10-19T00:43:33.531Z" }, + { url = "https://files.pythonhosted.org/packages/b2/7c/f1f70a17e272b433232bc8a27df97e46b202d6cc07e3b0d63f7f41ba0f2d/cytoolz-1.1.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ddd2863f321d67527d3b67a93000a378ad6f967056f68c06467fe011278a6d0e", size = 3107634, upload-time = "2025-10-19T00:43:35.57Z" }, + { url = "https://files.pythonhosted.org/packages/8f/bd/c3226a57474b4aef1f90040510cba30d0decd3515fed48dc229b37c2f898/cytoolz-1.1.0-cp314-cp314t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:4e6b428e9eb5126053c2ae0efa62512ff4b38ed3951f4d0888ca7005d63e56f5", size = 2806221, upload-time = "2025-10-19T00:43:37.707Z" }, + { url = "https://files.pythonhosted.org/packages/c3/47/2f7bfe4aaa1e07dc9828bea228ed744faf73b26aee0c1bdf3b5520bf1909/cytoolz-1.1.0-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:d758e5ef311d2671e0ae8c214c52e44617cf1e58bef8f022b547b9802a5a7f30", size = 3107671, upload-time = "2025-10-19T00:43:39.401Z" }, + { url = "https://files.pythonhosted.org/packages/4d/12/6ff3b04fbd1369d0fcd5f8b5910ba6e427e33bf113754c4c35ec3f747924/cytoolz-1.1.0-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:a95416eca473e6c1179b48d86adcf528b59c63ce78f4cb9934f2e413afa9b56b", size = 3176350, upload-time = "2025-10-19T00:43:41.148Z" }, + { url = "https://files.pythonhosted.org/packages/e6/8c/6691d986b728e77b5d2872743ebcd962d37a2d0f7e9ad95a81b284fbf905/cytoolz-1.1.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = 
"sha256:36c8ede93525cf11e2cc787b7156e5cecd7340193ef800b816a16f1404a8dc6d", size = 3001173, upload-time = "2025-10-19T00:43:42.923Z" }, + { url = "https://files.pythonhosted.org/packages/7a/cb/f59d83a5058e1198db5a1f04e4a124c94d60390e4fa89b6d2e38ee8288a0/cytoolz-1.1.0-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:c0c949755b6d8a649c5fbc888bc30915926f1b09fe42fea9f289e297c2f6ddd3", size = 2701374, upload-time = "2025-10-19T00:43:44.716Z" }, + { url = "https://files.pythonhosted.org/packages/b7/f0/1ae6d28df503b0bdae094879da2072b8ba13db5919cd3798918761578411/cytoolz-1.1.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:e1b6d37545816905a76d9ed59fa4e332f929e879f062a39ea0f6f620405cdc27", size = 2953081, upload-time = "2025-10-19T00:43:47.103Z" }, + { url = "https://files.pythonhosted.org/packages/f4/06/d86fe811c6222dc32d3e08f5d88d2be598a6055b4d0590e7c1428d55c386/cytoolz-1.1.0-cp314-cp314t-musllinux_1_2_armv7l.whl", hash = "sha256:05332112d4087904842b36954cd1d3fc0e463a2f4a7ef9477bd241427c593c3b", size = 2862228, upload-time = "2025-10-19T00:43:49.353Z" }, + { url = "https://files.pythonhosted.org/packages/ae/32/978ef6f42623be44a0a03ae9de875ab54aa26c7e38c5c4cd505460b0927d/cytoolz-1.1.0-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:31538ca2fad2d688cbd962ccc3f1da847329e2258a52940f10a2ac0719e526be", size = 2861971, upload-time = "2025-10-19T00:43:51.028Z" }, + { url = "https://files.pythonhosted.org/packages/ee/f7/74c69497e756b752b359925d1feef68b91df024a4124a823740f675dacd3/cytoolz-1.1.0-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:747562aa70abf219ea16f07d50ac0157db856d447f7f498f592e097cbc77df0b", size = 2975304, upload-time = "2025-10-19T00:43:52.99Z" }, + { url = "https://files.pythonhosted.org/packages/5b/2b/3ce0e6889a6491f3418ad4d84ae407b8456b02169a5a1f87990dbba7433b/cytoolz-1.1.0-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:3dc15c48b20c0f467e15e341e102896c8422dccf8efc6322def5c1b02f074629", size = 2697371, 
upload-time = "2025-10-19T00:43:55.312Z" }, + { url = "https://files.pythonhosted.org/packages/15/87/c616577f0891d97860643c845f7221e95240aa589586de727e28a5eb6e52/cytoolz-1.1.0-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:3c03137ee6103ba92d5d6ad6a510e86fded69cd67050bd8a1843f15283be17ac", size = 2992436, upload-time = "2025-10-19T00:43:57.253Z" }, + { url = "https://files.pythonhosted.org/packages/e7/9f/490c81bffb3428ab1fa114051fbb5ba18aaa2e2fe4da5bf4170ca524e6b3/cytoolz-1.1.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:be8e298d88f88bd172b59912240558be3b7a04959375646e7fd4996401452941", size = 2917612, upload-time = "2025-10-19T00:43:59.423Z" }, + { url = "https://files.pythonhosted.org/packages/66/35/0fec2769660ca6472bbf3317ab634675827bb706d193e3240aaf20eab961/cytoolz-1.1.0-cp314-cp314t-win32.whl", hash = "sha256:3d407140f5604a89578285d4aac7b18b8eafa055cf776e781aabb89c48738fad", size = 960842, upload-time = "2025-10-19T00:44:01.143Z" }, + { url = "https://files.pythonhosted.org/packages/46/b4/b7ce3d3cd20337becfec978ecfa6d0ef64884d0cf32d44edfed8700914b9/cytoolz-1.1.0-cp314-cp314t-win_amd64.whl", hash = "sha256:56e5afb69eb6e1b3ffc34716ee5f92ffbdb5cb003b3a5ca4d4b0fe700e217162", size = 1020835, upload-time = "2025-10-19T00:44:03.246Z" }, + { url = "https://files.pythonhosted.org/packages/2c/1f/0498009aa563a9c5d04f520aadc6e1c0942434d089d0b2f51ea986470f55/cytoolz-1.1.0-cp314-cp314t-win_arm64.whl", hash = "sha256:27b19b4a286b3ff52040efa42dbe403730aebe5fdfd2def704eb285e2125c63e", size = 927963, upload-time = "2025-10-19T00:44:04.85Z" }, +] + +[[package]] +name = "eth-abi" +version = "5.2.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "eth-typing" }, + { name = "eth-utils" }, + { name = "parsimonious" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/00/71/d9e1380bd77fd22f98b534699af564f189b56d539cc2b9dab908d4e4c242/eth_abi-5.2.0.tar.gz", hash = 
"sha256:178703fa98c07d8eecd5ae569e7e8d159e493ebb6eeb534a8fe973fbc4e40ef0", size = 49797, upload-time = "2025-01-14T16:29:34.629Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/7a/b4/2f3982c4cbcbf5eeb6aec62df1533c0e63c653b3021ff338d44944405676/eth_abi-5.2.0-py3-none-any.whl", hash = "sha256:17abe47560ad753f18054f5b3089fcb588f3e3a092136a416b6c1502cb7e8877", size = 28511, upload-time = "2025-01-14T16:29:31.862Z" }, +] + +[[package]] +name = "eth-account" +version = "0.13.7" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "bitarray" }, + { name = "ckzg" }, + { name = "eth-abi" }, + { name = "eth-keyfile" }, + { name = "eth-keys" }, + { name = "eth-rlp" }, + { name = "eth-utils" }, + { name = "hexbytes" }, + { name = "pydantic" }, + { name = "rlp" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/74/cf/20f76a29be97339c969fd765f1237154286a565a1d61be98e76bb7af946a/eth_account-0.13.7.tar.gz", hash = "sha256:5853ecbcbb22e65411176f121f5f24b8afeeaf13492359d254b16d8b18c77a46", size = 935998, upload-time = "2025-04-21T21:11:21.204Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/46/18/088fb250018cbe665bc2111974301b2d59f294a565aff7564c4df6878da2/eth_account-0.13.7-py3-none-any.whl", hash = "sha256:39727de8c94d004ff61d10da7587509c04d2dc7eac71e04830135300bdfc6d24", size = 587452, upload-time = "2025-04-21T21:11:18.346Z" }, +] + +[[package]] +name = "eth-hash" +version = "0.7.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/ee/38/577b7bc9380ef9dff0f1dffefe0c9a1ded2385e7a06c306fd95afb6f9451/eth_hash-0.7.1.tar.gz", hash = "sha256:d2411a403a0b0a62e8247b4117932d900ffb4c8c64b15f92620547ca5ce46be5", size = 12227, upload-time = "2025-01-13T21:29:21.765Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/eb/db/f8775490669d28aca24871c67dd56b3e72105cb3bcae9a4ec65dd70859b3/eth_hash-0.7.1-py3-none-any.whl", hash = 
"sha256:0fb1add2adf99ef28883fd6228eb447ef519ea72933535ad1a0b28c6f65f868a", size = 8028, upload-time = "2025-01-13T21:29:19.365Z" }, +] + +[[package]] +name = "eth-keyfile" +version = "0.8.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "eth-keys" }, + { name = "eth-utils" }, + { name = "pycryptodome" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/35/66/dd823b1537befefbbff602e2ada88f1477c5b40ec3731e3d9bc676c5f716/eth_keyfile-0.8.1.tar.gz", hash = "sha256:9708bc31f386b52cca0969238ff35b1ac72bd7a7186f2a84b86110d3c973bec1", size = 12267, upload-time = "2024-04-23T20:28:53.862Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/88/fc/48a586175f847dd9e05e5b8994d2fe8336098781ec2e9836a2ad94280281/eth_keyfile-0.8.1-py3-none-any.whl", hash = "sha256:65387378b82fe7e86d7cb9f8d98e6d639142661b2f6f490629da09fddbef6d64", size = 7510, upload-time = "2024-04-23T20:28:51.063Z" }, +] + +[[package]] +name = "eth-keys" +version = "0.7.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "eth-typing" }, + { name = "eth-utils" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/58/11/1ed831c50bd74f57829aa06e58bd82a809c37e070ee501c953b9ac1f1552/eth_keys-0.7.0.tar.gz", hash = "sha256:79d24fd876201df67741de3e3fefb3f4dbcbb6ace66e47e6fe662851a4547814", size = 30166, upload-time = "2025-04-07T17:40:21.697Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/4d/25/0ae00f2b0095e559d61ad3dc32171bd5a29dfd95ab04b4edd641f7c75f72/eth_keys-0.7.0-py3-none-any.whl", hash = "sha256:b0cdda8ffe8e5ba69c7c5ca33f153828edcace844f67aabd4542d7de38b159cf", size = 20656, upload-time = "2025-04-07T17:40:20.441Z" }, +] + +[[package]] +name = "eth-rlp" +version = "2.2.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "eth-utils" }, + { name = "hexbytes" }, + { name = "rlp" }, +] +sdist = { url = 
"https://files.pythonhosted.org/packages/7f/ea/ad39d001fa9fed07fad66edb00af701e29b48be0ed44a3bcf58cb3adf130/eth_rlp-2.2.0.tar.gz", hash = "sha256:5e4b2eb1b8213e303d6a232dfe35ab8c29e2d3051b86e8d359def80cd21db83d", size = 7720, upload-time = "2025-02-04T21:51:08.134Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/99/3b/57efe2bc2df0980680d57c01a36516cd3171d2319ceb30e675de19fc2cc5/eth_rlp-2.2.0-py3-none-any.whl", hash = "sha256:5692d595a741fbaef1203db6a2fedffbd2506d31455a6ad378c8449ee5985c47", size = 4446, upload-time = "2025-02-04T21:51:05.823Z" }, +] + +[[package]] +name = "eth-typing" +version = "5.2.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/60/54/62aa24b9cc708f06316167ee71c362779c8ed21fc8234a5cd94a8f53b623/eth_typing-5.2.1.tar.gz", hash = "sha256:7557300dbf02a93c70fa44af352b5c4a58f94e997a0fd6797fb7d1c29d9538ee", size = 21806, upload-time = "2025-04-14T20:39:28.217Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/30/72/c370bbe4c53da7bf998d3523f5a0f38867654923a82192df88d0705013d3/eth_typing-5.2.1-py3-none-any.whl", hash = "sha256:b0c2812ff978267563b80e9d701f487dd926f1d376d674f3b535cfe28b665d3d", size = 19163, upload-time = "2025-04-14T20:39:26.571Z" }, +] + +[[package]] +name = "eth-utils" +version = "5.3.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "cytoolz", marker = "implementation_name == 'cpython'" }, + { name = "eth-hash" }, + { name = "eth-typing" }, + { name = "pydantic" }, + { name = "toolz", marker = "implementation_name == 'pypy'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/e6/e1/ee3a8728227c3558853e63ff35bd4c449abdf5022a19601369400deacd39/eth_utils-5.3.1.tar.gz", hash = "sha256:c94e2d2abd024a9a42023b4ddc1c645814ff3d6a737b33d5cfd890ebf159c2d1", size = 123506, upload-time = "2025-08-27T16:37:17.378Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/bf/4d/257cdc01ada430b8e84b9f2385c2553f33218f5b47da9adf0a616308d4b7/eth_utils-5.3.1-py3-none-any.whl", hash = "sha256:1f5476d8f29588d25b8ae4987e1ffdfae6d4c09026e476c4aad13b32dda3ead0", size = 102529, upload-time = "2025-08-27T16:37:15.449Z" }, +] + +[[package]] +name = "hexbytes" +version = "1.3.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/7f/87/adf4635b4b8c050283d74e6db9a81496063229c9263e6acc1903ab79fbec/hexbytes-1.3.1.tar.gz", hash = "sha256:a657eebebdfe27254336f98d8af6e2236f3f83aed164b87466b6cf6c5f5a4765", size = 8633, upload-time = "2025-05-14T16:45:17.5Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/8d/e0/3b31492b1c89da3c5a846680517871455b30c54738486fc57ac79a5761bd/hexbytes-1.3.1-py3-none-any.whl", hash = "sha256:da01ff24a1a9a2b1881c4b85f0e9f9b0f51b526b379ffa23832ae7899d29c2c7", size = 5074, upload-time = "2025-05-14T16:45:16.179Z" }, +] + +[[package]] +name = "hyperliquid-python-sdk" +version = "0.22.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "eth-account" }, + { name = "eth-utils" }, + { name = "msgpack" }, + { name = "requests" }, + { name = "websocket-client" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/38/7a/f0497091909094a722a3fda64a3fbf180adfacbda3c628b04c5267f230b0/hyperliquid_python_sdk-0.22.0.tar.gz", hash = "sha256:a26e3e015e050c094c411196e689ffaac4ff942fb0ef63bb2863502f63b33e5b", size = 25119, upload-time = "2026-02-04T19:06:13.886Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/dd/74/f8f229f2c659cbf477539a6dccc24452ebe4130d960ba75f7485ee6c0551/hyperliquid_python_sdk-0.22.0-py3-none-any.whl", hash = "sha256:d47432d5dc5ea33cd6c60e01dcb0d773e822ee29c6b299042e59a8cd2c4e2c9f", size = 24529, upload-time = "2026-02-04T19:06:12.46Z" }, +] + +[[package]] +name = "idna" +version = "3.11" +source = { registry = "https://pypi.org/simple" } +sdist = { url = 
"https://files.pythonhosted.org/packages/6f/6d/0703ccc57f3a7233505399edb88de3cbd678da106337b9fcde432b65ed60/idna-3.11.tar.gz", hash = "sha256:795dafcc9c04ed0c1fb032c2aa73654d8e8c5023a7df64a53f39190ada629902", size = 194582, upload-time = "2025-10-12T14:55:20.501Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/0e/61/66938bbb5fc52dbdf84594873d5b51fb1f7c7794e9c0f5bd885f30bc507b/idna-3.11-py3-none-any.whl", hash = "sha256:771a87f49d9defaf64091e6e6fe9c18d4833f140bd19464795bc32d966ca37ea", size = 71008, upload-time = "2025-10-12T14:55:18.883Z" }, +] + +[[package]] +name = "iniconfig" +version = "2.3.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/72/34/14ca021ce8e5dfedc35312d08ba8bf51fdd999c576889fc2c24cb97f4f10/iniconfig-2.3.0.tar.gz", hash = "sha256:c76315c77db068650d49c5b56314774a7804df16fee4402c1f19d6d15d8c4730", size = 20503, upload-time = "2025-10-18T21:55:43.219Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/cb/b1/3846dd7f199d53cb17f49cba7e651e9ce294d8497c8c150530ed11865bb8/iniconfig-2.3.0-py3-none-any.whl", hash = "sha256:f631c04d2c48c52b84d0d0549c99ff3859c98df65b3101406327ecc7d53fbf12", size = 7484, upload-time = "2025-10-18T21:55:41.639Z" }, +] + +[[package]] +name = "linkify-it-py" +version = "2.1.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "uc-micro-py" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/2e/c9/06ea13676ef354f0af6169587ae292d3e2406e212876a413bf9eece4eb23/linkify_it_py-2.1.0.tar.gz", hash = "sha256:43360231720999c10e9328dc3691160e27a718e280673d444c38d7d3aaa3b98b", size = 29158, upload-time = "2026-03-01T07:48:47.683Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b4/de/88b3be5c31b22333b3ca2f6ff1de4e863d8fe45aaea7485f591970ec1d3e/linkify_it_py-2.1.0-py3-none-any.whl", hash = "sha256:0d252c1594ecba2ecedc444053db5d3a9b7ec1b0dd929c8f1d74dce89f86c05e", size = 19878, upload-time = 
"2026-03-01T07:48:46.098Z" }, +] + +[[package]] +name = "markdown-it-py" +version = "4.0.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "mdurl" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/5b/f5/4ec618ed16cc4f8fb3b701563655a69816155e79e24a17b651541804721d/markdown_it_py-4.0.0.tar.gz", hash = "sha256:cb0a2b4aa34f932c007117b194e945bd74e0ec24133ceb5bac59009cda1cb9f3", size = 73070, upload-time = "2025-08-11T12:57:52.854Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/94/54/e7d793b573f298e1c9013b8c4dade17d481164aa517d1d7148619c2cedbf/markdown_it_py-4.0.0-py3-none-any.whl", hash = "sha256:87327c59b172c5011896038353a81343b6754500a08cd7a4973bb48c6d578147", size = 87321, upload-time = "2025-08-11T12:57:51.923Z" }, +] + +[package.optional-dependencies] +linkify = [ + { name = "linkify-it-py" }, +] + +[[package]] +name = "mdit-py-plugins" +version = "0.5.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "markdown-it-py" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/b2/fd/a756d36c0bfba5f6e39a1cdbdbfdd448dc02692467d83816dff4592a1ebc/mdit_py_plugins-0.5.0.tar.gz", hash = "sha256:f4918cb50119f50446560513a8e311d574ff6aaed72606ddae6d35716fe809c6", size = 44655, upload-time = "2025-08-11T07:25:49.083Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/fb/86/dd6e5db36df29e76c7a7699123569a4a18c1623ce68d826ed96c62643cae/mdit_py_plugins-0.5.0-py3-none-any.whl", hash = "sha256:07a08422fc1936a5d26d146759e9155ea466e842f5ab2f7d2266dd084c8dab1f", size = 57205, upload-time = "2025-08-11T07:25:47.597Z" }, +] + +[[package]] +name = "mdurl" +version = "0.1.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/d6/54/cfe61301667036ec958cb99bd3efefba235e65cdeb9c84d24a8293ba1d90/mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba", size = 8729, upload-time 
= "2022-08-14T12:40:10.846Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b3/38/89ba8ad64ae25be8de66a6d463314cf1eb366222074cfda9ee839c56a4b4/mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8", size = 9979, upload-time = "2022-08-14T12:40:09.779Z" }, +] + +[[package]] +name = "msgpack" +version = "1.1.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/4d/f2/bfb55a6236ed8725a96b0aa3acbd0ec17588e6a2c3b62a93eb513ed8783f/msgpack-1.1.2.tar.gz", hash = "sha256:3b60763c1373dd60f398488069bcdc703cd08a711477b5d480eecc9f9626f47e", size = 173581, upload-time = "2025-10-08T09:15:56.596Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ad/bd/8b0d01c756203fbab65d265859749860682ccd2a59594609aeec3a144efa/msgpack-1.1.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:70a0dff9d1f8da25179ffcf880e10cf1aad55fdb63cd59c9a49a1b82290062aa", size = 81939, upload-time = "2025-10-08T09:15:01.472Z" }, + { url = "https://files.pythonhosted.org/packages/34/68/ba4f155f793a74c1483d4bdef136e1023f7bcba557f0db4ef3db3c665cf1/msgpack-1.1.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:446abdd8b94b55c800ac34b102dffd2f6aa0ce643c55dfc017ad89347db3dbdb", size = 85064, upload-time = "2025-10-08T09:15:03.764Z" }, + { url = "https://files.pythonhosted.org/packages/f2/60/a064b0345fc36c4c3d2c743c82d9100c40388d77f0b48b2f04d6041dbec1/msgpack-1.1.2-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c63eea553c69ab05b6747901b97d620bb2a690633c77f23feb0c6a947a8a7b8f", size = 417131, upload-time = "2025-10-08T09:15:05.136Z" }, + { url = "https://files.pythonhosted.org/packages/65/92/a5100f7185a800a5d29f8d14041f61475b9de465ffcc0f3b9fba606e4505/msgpack-1.1.2-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = 
"sha256:372839311ccf6bdaf39b00b61288e0557916c3729529b301c52c2d88842add42", size = 427556, upload-time = "2025-10-08T09:15:06.837Z" }, + { url = "https://files.pythonhosted.org/packages/f5/87/ffe21d1bf7d9991354ad93949286f643b2bb6ddbeab66373922b44c3b8cc/msgpack-1.1.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:2929af52106ca73fcb28576218476ffbb531a036c2adbcf54a3664de124303e9", size = 404920, upload-time = "2025-10-08T09:15:08.179Z" }, + { url = "https://files.pythonhosted.org/packages/ff/41/8543ed2b8604f7c0d89ce066f42007faac1eaa7d79a81555f206a5cdb889/msgpack-1.1.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:be52a8fc79e45b0364210eef5234a7cf8d330836d0a64dfbb878efa903d84620", size = 415013, upload-time = "2025-10-08T09:15:09.83Z" }, + { url = "https://files.pythonhosted.org/packages/41/0d/2ddfaa8b7e1cee6c490d46cb0a39742b19e2481600a7a0e96537e9c22f43/msgpack-1.1.2-cp312-cp312-win32.whl", hash = "sha256:1fff3d825d7859ac888b0fbda39a42d59193543920eda9d9bea44d958a878029", size = 65096, upload-time = "2025-10-08T09:15:11.11Z" }, + { url = "https://files.pythonhosted.org/packages/8c/ec/d431eb7941fb55a31dd6ca3404d41fbb52d99172df2e7707754488390910/msgpack-1.1.2-cp312-cp312-win_amd64.whl", hash = "sha256:1de460f0403172cff81169a30b9a92b260cb809c4cb7e2fc79ae8d0510c78b6b", size = 72708, upload-time = "2025-10-08T09:15:12.554Z" }, + { url = "https://files.pythonhosted.org/packages/c5/31/5b1a1f70eb0e87d1678e9624908f86317787b536060641d6798e3cf70ace/msgpack-1.1.2-cp312-cp312-win_arm64.whl", hash = "sha256:be5980f3ee0e6bd44f3a9e9dea01054f175b50c3e6cdb692bc9424c0bbb8bf69", size = 64119, upload-time = "2025-10-08T09:15:13.589Z" }, + { url = "https://files.pythonhosted.org/packages/6b/31/b46518ecc604d7edf3a4f94cb3bf021fc62aa301f0cb849936968164ef23/msgpack-1.1.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:4efd7b5979ccb539c221a4c4e16aac1a533efc97f3b759bb5a5ac9f6d10383bf", size = 81212, upload-time = "2025-10-08T09:15:14.552Z" }, + { url = 
"https://files.pythonhosted.org/packages/92/dc/c385f38f2c2433333345a82926c6bfa5ecfff3ef787201614317b58dd8be/msgpack-1.1.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:42eefe2c3e2af97ed470eec850facbe1b5ad1d6eacdbadc42ec98e7dcf68b4b7", size = 84315, upload-time = "2025-10-08T09:15:15.543Z" }, + { url = "https://files.pythonhosted.org/packages/d3/68/93180dce57f684a61a88a45ed13047558ded2be46f03acb8dec6d7c513af/msgpack-1.1.2-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1fdf7d83102bf09e7ce3357de96c59b627395352a4024f6e2458501f158bf999", size = 412721, upload-time = "2025-10-08T09:15:16.567Z" }, + { url = "https://files.pythonhosted.org/packages/5d/ba/459f18c16f2b3fc1a1ca871f72f07d70c07bf768ad0a507a698b8052ac58/msgpack-1.1.2-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fac4be746328f90caa3cd4bc67e6fe36ca2bf61d5c6eb6d895b6527e3f05071e", size = 424657, upload-time = "2025-10-08T09:15:17.825Z" }, + { url = "https://files.pythonhosted.org/packages/38/f8/4398c46863b093252fe67368b44edc6c13b17f4e6b0e4929dbf0bdb13f23/msgpack-1.1.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:fffee09044073e69f2bad787071aeec727183e7580443dfeb8556cbf1978d162", size = 402668, upload-time = "2025-10-08T09:15:19.003Z" }, + { url = "https://files.pythonhosted.org/packages/28/ce/698c1eff75626e4124b4d78e21cca0b4cc90043afb80a507626ea354ab52/msgpack-1.1.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:5928604de9b032bc17f5099496417f113c45bc6bc21b5c6920caf34b3c428794", size = 419040, upload-time = "2025-10-08T09:15:20.183Z" }, + { url = "https://files.pythonhosted.org/packages/67/32/f3cd1667028424fa7001d82e10ee35386eea1408b93d399b09fb0aa7875f/msgpack-1.1.2-cp313-cp313-win32.whl", hash = "sha256:a7787d353595c7c7e145e2331abf8b7ff1e6673a6b974ded96e6d4ec09f00c8c", size = 65037, upload-time = "2025-10-08T09:15:21.416Z" }, + { url = 
"https://files.pythonhosted.org/packages/74/07/1ed8277f8653c40ebc65985180b007879f6a836c525b3885dcc6448ae6cb/msgpack-1.1.2-cp313-cp313-win_amd64.whl", hash = "sha256:a465f0dceb8e13a487e54c07d04ae3ba131c7c5b95e2612596eafde1dccf64a9", size = 72631, upload-time = "2025-10-08T09:15:22.431Z" }, + { url = "https://files.pythonhosted.org/packages/e5/db/0314e4e2db56ebcf450f277904ffd84a7988b9e5da8d0d61ab2d057df2b6/msgpack-1.1.2-cp313-cp313-win_arm64.whl", hash = "sha256:e69b39f8c0aa5ec24b57737ebee40be647035158f14ed4b40e6f150077e21a84", size = 64118, upload-time = "2025-10-08T09:15:23.402Z" }, + { url = "https://files.pythonhosted.org/packages/22/71/201105712d0a2ff07b7873ed3c220292fb2ea5120603c00c4b634bcdafb3/msgpack-1.1.2-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:e23ce8d5f7aa6ea6d2a2b326b4ba46c985dbb204523759984430db7114f8aa00", size = 81127, upload-time = "2025-10-08T09:15:24.408Z" }, + { url = "https://files.pythonhosted.org/packages/1b/9f/38ff9e57a2eade7bf9dfee5eae17f39fc0e998658050279cbb14d97d36d9/msgpack-1.1.2-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:6c15b7d74c939ebe620dd8e559384be806204d73b4f9356320632d783d1f7939", size = 84981, upload-time = "2025-10-08T09:15:25.812Z" }, + { url = "https://files.pythonhosted.org/packages/8e/a9/3536e385167b88c2cc8f4424c49e28d49a6fc35206d4a8060f136e71f94c/msgpack-1.1.2-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:99e2cb7b9031568a2a5c73aa077180f93dd2e95b4f8d3b8e14a73ae94a9e667e", size = 411885, upload-time = "2025-10-08T09:15:27.22Z" }, + { url = "https://files.pythonhosted.org/packages/2f/40/dc34d1a8d5f1e51fc64640b62b191684da52ca469da9cd74e84936ffa4a6/msgpack-1.1.2-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:180759d89a057eab503cf62eeec0aa61c4ea1200dee709f3a8e9397dbb3b6931", size = 419658, upload-time = "2025-10-08T09:15:28.4Z" }, + { url = 
"https://files.pythonhosted.org/packages/3b/ef/2b92e286366500a09a67e03496ee8b8ba00562797a52f3c117aa2b29514b/msgpack-1.1.2-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:04fb995247a6e83830b62f0b07bf36540c213f6eac8e851166d8d86d83cbd014", size = 403290, upload-time = "2025-10-08T09:15:29.764Z" }, + { url = "https://files.pythonhosted.org/packages/78/90/e0ea7990abea5764e4655b8177aa7c63cdfa89945b6e7641055800f6c16b/msgpack-1.1.2-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:8e22ab046fa7ede9e36eeb4cfad44d46450f37bb05d5ec482b02868f451c95e2", size = 415234, upload-time = "2025-10-08T09:15:31.022Z" }, + { url = "https://files.pythonhosted.org/packages/72/4e/9390aed5db983a2310818cd7d3ec0aecad45e1f7007e0cda79c79507bb0d/msgpack-1.1.2-cp314-cp314-win32.whl", hash = "sha256:80a0ff7d4abf5fecb995fcf235d4064b9a9a8a40a3ab80999e6ac1e30b702717", size = 66391, upload-time = "2025-10-08T09:15:32.265Z" }, + { url = "https://files.pythonhosted.org/packages/6e/f1/abd09c2ae91228c5f3998dbd7f41353def9eac64253de3c8105efa2082f7/msgpack-1.1.2-cp314-cp314-win_amd64.whl", hash = "sha256:9ade919fac6a3e7260b7f64cea89df6bec59104987cbea34d34a2fa15d74310b", size = 73787, upload-time = "2025-10-08T09:15:33.219Z" }, + { url = "https://files.pythonhosted.org/packages/6a/b0/9d9f667ab48b16ad4115c1935d94023b82b3198064cb84a123e97f7466c1/msgpack-1.1.2-cp314-cp314-win_arm64.whl", hash = "sha256:59415c6076b1e30e563eb732e23b994a61c159cec44deaf584e5cc1dd662f2af", size = 66453, upload-time = "2025-10-08T09:15:34.225Z" }, + { url = "https://files.pythonhosted.org/packages/16/67/93f80545eb1792b61a217fa7f06d5e5cb9e0055bed867f43e2b8e012e137/msgpack-1.1.2-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:897c478140877e5307760b0ea66e0932738879e7aa68144d9b78ea4c8302a84a", size = 85264, upload-time = "2025-10-08T09:15:35.61Z" }, + { url = "https://files.pythonhosted.org/packages/87/1c/33c8a24959cf193966ef11a6f6a2995a65eb066bd681fd085afd519a57ce/msgpack-1.1.2-cp314-cp314t-macosx_11_0_arm64.whl", hash = 
"sha256:a668204fa43e6d02f89dbe79a30b0d67238d9ec4c5bd8a940fc3a004a47b721b", size = 89076, upload-time = "2025-10-08T09:15:36.619Z" }, + { url = "https://files.pythonhosted.org/packages/fc/6b/62e85ff7193663fbea5c0254ef32f0c77134b4059f8da89b958beb7696f3/msgpack-1.1.2-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5559d03930d3aa0f3aacb4c42c776af1a2ace2611871c84a75afe436695e6245", size = 435242, upload-time = "2025-10-08T09:15:37.647Z" }, + { url = "https://files.pythonhosted.org/packages/c1/47/5c74ecb4cc277cf09f64e913947871682ffa82b3b93c8dad68083112f412/msgpack-1.1.2-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:70c5a7a9fea7f036b716191c29047374c10721c389c21e9ffafad04df8c52c90", size = 432509, upload-time = "2025-10-08T09:15:38.794Z" }, + { url = "https://files.pythonhosted.org/packages/24/a4/e98ccdb56dc4e98c929a3f150de1799831c0a800583cde9fa022fa90602d/msgpack-1.1.2-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:f2cb069d8b981abc72b41aea1c580ce92d57c673ec61af4c500153a626cb9e20", size = 415957, upload-time = "2025-10-08T09:15:40.238Z" }, + { url = "https://files.pythonhosted.org/packages/da/28/6951f7fb67bc0a4e184a6b38ab71a92d9ba58080b27a77d3e2fb0be5998f/msgpack-1.1.2-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:d62ce1f483f355f61adb5433ebfd8868c5f078d1a52d042b0a998682b4fa8c27", size = 422910, upload-time = "2025-10-08T09:15:41.505Z" }, + { url = "https://files.pythonhosted.org/packages/f0/03/42106dcded51f0a0b5284d3ce30a671e7bd3f7318d122b2ead66ad289fed/msgpack-1.1.2-cp314-cp314t-win32.whl", hash = "sha256:1d1418482b1ee984625d88aa9585db570180c286d942da463533b238b98b812b", size = 75197, upload-time = "2025-10-08T09:15:42.954Z" }, + { url = "https://files.pythonhosted.org/packages/15/86/d0071e94987f8db59d4eeb386ddc64d0bb9b10820a8d82bcd3e53eeb2da6/msgpack-1.1.2-cp314-cp314t-win_amd64.whl", hash = 
"sha256:5a46bf7e831d09470ad92dff02b8b1ac92175ca36b087f904a0519857c6be3ff", size = 85772, upload-time = "2025-10-08T09:15:43.954Z" }, + { url = "https://files.pythonhosted.org/packages/81/f2/08ace4142eb281c12701fc3b93a10795e4d4dc7f753911d836675050f886/msgpack-1.1.2-cp314-cp314t-win_arm64.whl", hash = "sha256:d99ef64f349d5ec3293688e91486c5fdb925ed03807f64d98d205d2713c60b46", size = 70868, upload-time = "2025-10-08T09:15:44.959Z" }, +] + +[[package]] +name = "numpy" +version = "2.4.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/10/8b/c265f4823726ab832de836cdd184d0986dcf94480f81e8739692a7ac7af2/numpy-2.4.3.tar.gz", hash = "sha256:483a201202b73495f00dbc83796c6ae63137a9bdade074f7648b3e32613412dd", size = 20727743, upload-time = "2026-03-09T07:58:53.426Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a9/ed/6388632536f9788cea23a3a1b629f25b43eaacd7d7377e5d6bc7b9deb69b/numpy-2.4.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:61b0cbabbb6126c8df63b9a3a0c4b1f44ebca5e12ff6997b80fcf267fb3150ef", size = 16669628, upload-time = "2026-03-09T07:56:24.252Z" }, + { url = "https://files.pythonhosted.org/packages/74/1b/ee2abfc68e1ce728b2958b6ba831d65c62e1b13ce3017c13943f8f9b5b2e/numpy-2.4.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:7395e69ff32526710748f92cd8c9849b361830968ea3e24a676f272653e8983e", size = 14696872, upload-time = "2026-03-09T07:56:26.991Z" }, + { url = "https://files.pythonhosted.org/packages/ba/d1/780400e915ff5638166f11ca9dc2c5815189f3d7cf6f8759a1685e586413/numpy-2.4.3-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:abdce0f71dcb4a00e4e77f3faf05e4616ceccfe72ccaa07f47ee79cda3b7b0f4", size = 5203489, upload-time = "2026-03-09T07:56:29.414Z" }, + { url = "https://files.pythonhosted.org/packages/0b/bb/baffa907e9da4cc34a6e556d6d90e032f6d7a75ea47968ea92b4858826c4/numpy-2.4.3-cp312-cp312-macosx_14_0_x86_64.whl", hash = 
"sha256:48da3a4ee1336454b07497ff7ec83903efa5505792c4e6d9bf83d99dc07a1e18", size = 6550814, upload-time = "2026-03-09T07:56:32.225Z" }, + { url = "https://files.pythonhosted.org/packages/7b/12/8c9f0c6c95f76aeb20fc4a699c33e9f827fa0d0f857747c73bb7b17af945/numpy-2.4.3-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:32e3bef222ad6b052280311d1d60db8e259e4947052c3ae7dd6817451fc8a4c5", size = 15666601, upload-time = "2026-03-09T07:56:34.461Z" }, + { url = "https://files.pythonhosted.org/packages/bd/79/cc665495e4d57d0aa6fbcc0aa57aa82671dfc78fbf95fe733ed86d98f52a/numpy-2.4.3-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e7dd01a46700b1967487141a66ac1a3cf0dd8ebf1f08db37d46389401512ca97", size = 16621358, upload-time = "2026-03-09T07:56:36.852Z" }, + { url = "https://files.pythonhosted.org/packages/a8/40/b4ecb7224af1065c3539f5ecfff879d090de09608ad1008f02c05c770cb3/numpy-2.4.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:76f0f283506c28b12bba319c0fab98217e9f9b54e6160e9c79e9f7348ba32e9c", size = 17016135, upload-time = "2026-03-09T07:56:39.337Z" }, + { url = "https://files.pythonhosted.org/packages/f7/b1/6a88e888052eed951afed7a142dcdf3b149a030ca59b4c71eef085858e43/numpy-2.4.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:737f630a337364665aba3b5a77e56a68cc42d350edd010c345d65a3efa3addcc", size = 18345816, upload-time = "2026-03-09T07:56:42.31Z" }, + { url = "https://files.pythonhosted.org/packages/f3/8f/103a60c5f8c3d7fc678c19cd7b2476110da689ccb80bc18050efbaeae183/numpy-2.4.3-cp312-cp312-win32.whl", hash = "sha256:26952e18d82a1dbbc2f008d402021baa8d6fc8e84347a2072a25e08b46d698b9", size = 5960132, upload-time = "2026-03-09T07:56:44.851Z" }, + { url = "https://files.pythonhosted.org/packages/d7/7c/f5ee1bf6ed888494978046a809df2882aad35d414b622893322df7286879/numpy-2.4.3-cp312-cp312-win_amd64.whl", hash = "sha256:65f3c2455188f09678355f5cae1f959a06b778bc66d535da07bf2ef20cd319d5", size = 12316144, upload-time = 
"2026-03-09T07:56:47.057Z" }, + { url = "https://files.pythonhosted.org/packages/71/46/8d1cb3f7a00f2fb6394140e7e6623696e54c6318a9d9691bb4904672cf42/numpy-2.4.3-cp312-cp312-win_arm64.whl", hash = "sha256:2abad5c7fef172b3377502bde47892439bae394a71bc329f31df0fd829b41a9e", size = 10220364, upload-time = "2026-03-09T07:56:49.849Z" }, + { url = "https://files.pythonhosted.org/packages/b6/d0/1fe47a98ce0df229238b77611340aff92d52691bcbc10583303181abf7fc/numpy-2.4.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:b346845443716c8e542d54112966383b448f4a3ba5c66409771b8c0889485dd3", size = 16665297, upload-time = "2026-03-09T07:56:52.296Z" }, + { url = "https://files.pythonhosted.org/packages/27/d9/4e7c3f0e68dfa91f21c6fb6cf839bc829ec920688b1ce7ec722b1a6202fb/numpy-2.4.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:2629289168f4897a3c4e23dc98d6f1731f0fc0fe52fb9db19f974041e4cc12b9", size = 14691853, upload-time = "2026-03-09T07:56:54.992Z" }, + { url = "https://files.pythonhosted.org/packages/3a/66/bd096b13a87549683812b53ab211e6d413497f84e794fb3c39191948da97/numpy-2.4.3-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:bb2e3cf95854233799013779216c57e153c1ee67a0bf92138acca0e429aefaee", size = 5198435, upload-time = "2026-03-09T07:56:57.184Z" }, + { url = "https://files.pythonhosted.org/packages/a2/2f/687722910b5a5601de2135c891108f51dfc873d8e43c8ed9f4ebb440b4a2/numpy-2.4.3-cp313-cp313-macosx_14_0_x86_64.whl", hash = "sha256:7f3408ff897f8ab07a07fbe2823d7aee6ff644c097cc1f90382511fe982f647f", size = 6546347, upload-time = "2026-03-09T07:56:59.531Z" }, + { url = "https://files.pythonhosted.org/packages/bf/ec/7971c4e98d86c564750393fab8d7d83d0a9432a9d78bb8a163a6dc59967a/numpy-2.4.3-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:decb0eb8a53c3b009b0962378065589685d66b23467ef5dac16cbe818afde27f", size = 15664626, upload-time = "2026-03-09T07:57:01.385Z" }, + { url = 
"https://files.pythonhosted.org/packages/7e/eb/7daecbea84ec935b7fc732e18f532073064a3816f0932a40a17f3349185f/numpy-2.4.3-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d5f51900414fc9204a0e0da158ba2ac52b75656e7dce7e77fb9f84bfa343b4cc", size = 16608916, upload-time = "2026-03-09T07:57:04.008Z" }, + { url = "https://files.pythonhosted.org/packages/df/58/2a2b4a817ffd7472dca4421d9f0776898b364154e30c95f42195041dc03b/numpy-2.4.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:6bd06731541f89cdc01b261ba2c9e037f1543df7472517836b78dfb15bd6e476", size = 17015824, upload-time = "2026-03-09T07:57:06.347Z" }, + { url = "https://files.pythonhosted.org/packages/4a/ca/627a828d44e78a418c55f82dd4caea8ea4a8ef24e5144d9e71016e52fb40/numpy-2.4.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:22654fe6be0e5206f553a9250762c653d3698e46686eee53b399ab90da59bd92", size = 18334581, upload-time = "2026-03-09T07:57:09.114Z" }, + { url = "https://files.pythonhosted.org/packages/cd/c0/76f93962fc79955fcba30a429b62304332345f22d4daec1cb33653425643/numpy-2.4.3-cp313-cp313-win32.whl", hash = "sha256:d71e379452a2f670ccb689ec801b1218cd3983e253105d6e83780967e899d687", size = 5958618, upload-time = "2026-03-09T07:57:11.432Z" }, + { url = "https://files.pythonhosted.org/packages/b1/3c/88af0040119209b9b5cb59485fa48b76f372c73068dbf9254784b975ac53/numpy-2.4.3-cp313-cp313-win_amd64.whl", hash = "sha256:0a60e17a14d640f49146cb38e3f105f571318db7826d9b6fef7e4dce758faecd", size = 12312824, upload-time = "2026-03-09T07:57:13.586Z" }, + { url = "https://files.pythonhosted.org/packages/58/ce/3d07743aced3d173f877c3ef6a454c2174ba42b584ab0b7e6d99374f51ed/numpy-2.4.3-cp313-cp313-win_arm64.whl", hash = "sha256:c9619741e9da2059cd9c3f206110b97583c7152c1dc9f8aafd4beb450ac1c89d", size = 10221218, upload-time = "2026-03-09T07:57:16.183Z" }, + { url = 
"https://files.pythonhosted.org/packages/62/09/d96b02a91d09e9d97862f4fc8bfebf5400f567d8eb1fe4b0cc4795679c15/numpy-2.4.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:7aa4e54f6469300ebca1d9eb80acd5253cdfa36f2c03d79a35883687da430875", size = 14819570, upload-time = "2026-03-09T07:57:18.564Z" }, + { url = "https://files.pythonhosted.org/packages/b5/ca/0b1aba3905fdfa3373d523b2b15b19029f4f3031c87f4066bd9d20ef6c6b/numpy-2.4.3-cp313-cp313t-macosx_14_0_arm64.whl", hash = "sha256:d1b90d840b25874cf5cd20c219af10bac3667db3876d9a495609273ebe679070", size = 5326113, upload-time = "2026-03-09T07:57:21.052Z" }, + { url = "https://files.pythonhosted.org/packages/c0/63/406e0fd32fcaeb94180fd6a4c41e55736d676c54346b7efbce548b94a914/numpy-2.4.3-cp313-cp313t-macosx_14_0_x86_64.whl", hash = "sha256:a749547700de0a20a6718293396ec237bb38218049cfce788e08fcb716e8cf73", size = 6646370, upload-time = "2026-03-09T07:57:22.804Z" }, + { url = "https://files.pythonhosted.org/packages/b6/d0/10f7dc157d4b37af92720a196be6f54f889e90dcd30dce9dc657ed92c257/numpy-2.4.3-cp313-cp313t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:94f3c4a151a2e529adf49c1d54f0f57ff8f9b233ee4d44af623a81553ab86368", size = 15723499, upload-time = "2026-03-09T07:57:24.693Z" }, + { url = "https://files.pythonhosted.org/packages/66/f1/d1c2bf1161396629701bc284d958dc1efa3a5a542aab83cf11ee6eb4cba5/numpy-2.4.3-cp313-cp313t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:22c31dc07025123aedf7f2db9e91783df13f1776dc52c6b22c620870dc0fab22", size = 16657164, upload-time = "2026-03-09T07:57:27.676Z" }, + { url = "https://files.pythonhosted.org/packages/1a/be/cca19230b740af199ac47331a21c71e7a3d0ba59661350483c1600d28c37/numpy-2.4.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:148d59127ac95979d6f07e4d460f934ebdd6eed641db9c0db6c73026f2b2101a", size = 17081544, upload-time = "2026-03-09T07:57:30.664Z" }, + { url = 
"https://files.pythonhosted.org/packages/b9/c5/9602b0cbb703a0936fb40f8a95407e8171935b15846de2f0776e08af04c7/numpy-2.4.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:a97cbf7e905c435865c2d939af3d93f99d18eaaa3cabe4256f4304fb51604349", size = 18380290, upload-time = "2026-03-09T07:57:33.763Z" }, + { url = "https://files.pythonhosted.org/packages/ed/81/9f24708953cd30be9ee36ec4778f4b112b45165812f2ada4cc5ea1c1f254/numpy-2.4.3-cp313-cp313t-win32.whl", hash = "sha256:be3b8487d725a77acccc9924f65fd8bce9af7fac8c9820df1049424a2115af6c", size = 6082814, upload-time = "2026-03-09T07:57:36.491Z" }, + { url = "https://files.pythonhosted.org/packages/e2/9e/52f6eaa13e1a799f0ab79066c17f7016a4a8ae0c1aefa58c82b4dab690b4/numpy-2.4.3-cp313-cp313t-win_amd64.whl", hash = "sha256:1ec84fd7c8e652b0f4aaaf2e6e9cc8eaa9b1b80a537e06b2e3a2fb176eedcb26", size = 12452673, upload-time = "2026-03-09T07:57:38.281Z" }, + { url = "https://files.pythonhosted.org/packages/c4/04/b8cece6ead0b30c9fbd99bb835ad7ea0112ac5f39f069788c5558e3b1ab2/numpy-2.4.3-cp313-cp313t-win_arm64.whl", hash = "sha256:120df8c0a81ebbf5b9020c91439fccd85f5e018a927a39f624845be194a2be02", size = 10290907, upload-time = "2026-03-09T07:57:40.747Z" }, + { url = "https://files.pythonhosted.org/packages/70/ae/3936f79adebf8caf81bd7a599b90a561334a658be4dcc7b6329ebf4ee8de/numpy-2.4.3-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:5884ce5c7acfae1e4e1b6fde43797d10aa506074d25b531b4f54bde33c0c31d4", size = 16664563, upload-time = "2026-03-09T07:57:43.817Z" }, + { url = "https://files.pythonhosted.org/packages/9b/62/760f2b55866b496bb1fa7da2a6db076bef908110e568b02fcfc1422e2a3a/numpy-2.4.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:297837823f5bc572c5f9379b0c9f3a3365f08492cbdc33bcc3af174372ebb168", size = 14702161, upload-time = "2026-03-09T07:57:46.169Z" }, + { url = "https://files.pythonhosted.org/packages/32/af/a7a39464e2c0a21526fb4fb76e346fb172ebc92f6d1c7a07c2c139cc17b1/numpy-2.4.3-cp314-cp314-macosx_14_0_arm64.whl", hash = 
"sha256:a111698b4a3f8dcbe54c64a7708f049355abd603e619013c346553c1fd4ca90b", size = 5208738, upload-time = "2026-03-09T07:57:48.506Z" }, + { url = "https://files.pythonhosted.org/packages/29/8c/2a0cf86a59558fa078d83805589c2de490f29ed4fb336c14313a161d358a/numpy-2.4.3-cp314-cp314-macosx_14_0_x86_64.whl", hash = "sha256:4bd4741a6a676770e0e97fe9ab2e51de01183df3dcbcec591d26d331a40de950", size = 6543618, upload-time = "2026-03-09T07:57:50.591Z" }, + { url = "https://files.pythonhosted.org/packages/aa/b8/612ce010c0728b1c363fa4ea3aa4c22fe1c5da1de008486f8c2f5cb92fae/numpy-2.4.3-cp314-cp314-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:54f29b877279d51e210e0c80709ee14ccbbad647810e8f3d375561c45ef613dd", size = 15680676, upload-time = "2026-03-09T07:57:52.34Z" }, + { url = "https://files.pythonhosted.org/packages/a9/7e/4f120ecc54ba26ddf3dc348eeb9eb063f421de65c05fc961941798feea18/numpy-2.4.3-cp314-cp314-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:679f2a834bae9020f81534671c56fd0cc76dd7e5182f57131478e23d0dc59e24", size = 16613492, upload-time = "2026-03-09T07:57:54.91Z" }, + { url = "https://files.pythonhosted.org/packages/2c/86/1b6020db73be330c4b45d5c6ee4295d59cfeef0e3ea323959d053e5a6909/numpy-2.4.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:d84f0f881cb2225c2dfd7f78a10a5645d487a496c6668d6cc39f0f114164f3d0", size = 17031789, upload-time = "2026-03-09T07:57:57.641Z" }, + { url = "https://files.pythonhosted.org/packages/07/3a/3b90463bf41ebc21d1b7e06079f03070334374208c0f9a1f05e4ae8455e7/numpy-2.4.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:d213c7e6e8d211888cc359bab7199670a00f5b82c0978b9d1c75baf1eddbeac0", size = 18339941, upload-time = "2026-03-09T07:58:00.577Z" }, + { url = "https://files.pythonhosted.org/packages/a8/74/6d736c4cd962259fd8bae9be27363eb4883a2f9069763747347544c2a487/numpy-2.4.3-cp314-cp314-win32.whl", hash = "sha256:52077feedeff7c76ed7c9f1a0428558e50825347b7545bbb8523da2cd55c547a", size = 6007503, 
upload-time = "2026-03-09T07:58:03.331Z" }, + { url = "https://files.pythonhosted.org/packages/48/39/c56ef87af669364356bb011922ef0734fc49dad51964568634c72a009488/numpy-2.4.3-cp314-cp314-win_amd64.whl", hash = "sha256:0448e7f9caefb34b4b7dd2b77f21e8906e5d6f0365ad525f9f4f530b13df2afc", size = 12444915, upload-time = "2026-03-09T07:58:06.353Z" }, + { url = "https://files.pythonhosted.org/packages/9d/1f/ab8528e38d295fd349310807496fabb7cf9fe2e1f70b97bc20a483ea9d4a/numpy-2.4.3-cp314-cp314-win_arm64.whl", hash = "sha256:b44fd60341c4d9783039598efadd03617fa28d041fc37d22b62d08f2027fa0e7", size = 10494875, upload-time = "2026-03-09T07:58:08.734Z" }, + { url = "https://files.pythonhosted.org/packages/e6/ef/b7c35e4d5ef141b836658ab21a66d1a573e15b335b1d111d31f26c8ef80f/numpy-2.4.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:0a195f4216be9305a73c0e91c9b026a35f2161237cf1c6de9b681637772ea657", size = 14822225, upload-time = "2026-03-09T07:58:11.034Z" }, + { url = "https://files.pythonhosted.org/packages/cd/8d/7730fa9278cf6648639946cc816e7cc89f0d891602584697923375f801ed/numpy-2.4.3-cp314-cp314t-macosx_14_0_arm64.whl", hash = "sha256:cd32fbacb9fd1bf041bf8e89e4576b6f00b895f06d00914820ae06a616bdfef7", size = 5328769, upload-time = "2026-03-09T07:58:13.67Z" }, + { url = "https://files.pythonhosted.org/packages/47/01/d2a137317c958b074d338807c1b6a383406cdf8b8e53b075d804cc3d211d/numpy-2.4.3-cp314-cp314t-macosx_14_0_x86_64.whl", hash = "sha256:2e03c05abaee1f672e9d67bc858f300b5ccba1c21397211e8d77d98350972093", size = 6649461, upload-time = "2026-03-09T07:58:15.912Z" }, + { url = "https://files.pythonhosted.org/packages/5c/34/812ce12bc0f00272a4b0ec0d713cd237cb390666eb6206323d1cc9cedbb2/numpy-2.4.3-cp314-cp314t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:7d1ce23cce91fcea443320a9d0ece9b9305d4368875bab09538f7a5b4131938a", size = 15725809, upload-time = "2026-03-09T07:58:17.787Z" }, + { url = 
"https://files.pythonhosted.org/packages/25/c0/2aed473a4823e905e765fee3dc2cbf504bd3e68ccb1150fbdabd5c39f527/numpy-2.4.3-cp314-cp314t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c59020932feb24ed49ffd03704fbab89f22aa9c0d4b180ff45542fe8918f5611", size = 16655242, upload-time = "2026-03-09T07:58:20.476Z" }, + { url = "https://files.pythonhosted.org/packages/f2/c8/7e052b2fc87aa0e86de23f20e2c42bd261c624748aa8efd2c78f7bb8d8c6/numpy-2.4.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:9684823a78a6cd6ad7511fc5e25b07947d1d5b5e2812c93fe99d7d4195130720", size = 17080660, upload-time = "2026-03-09T07:58:23.067Z" }, + { url = "https://files.pythonhosted.org/packages/f3/3d/0876746044db2adcb11549f214d104f2e1be00f07a67edbb4e2812094847/numpy-2.4.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:0200b25c687033316fb39f0ff4e3e690e8957a2c3c8d22499891ec58c37a3eb5", size = 18380384, upload-time = "2026-03-09T07:58:25.839Z" }, + { url = "https://files.pythonhosted.org/packages/07/12/8160bea39da3335737b10308df4f484235fd297f556745f13092aa039d3b/numpy-2.4.3-cp314-cp314t-win32.whl", hash = "sha256:5e10da9e93247e554bb1d22f8edc51847ddd7dde52d85ce31024c1b4312bfba0", size = 6154547, upload-time = "2026-03-09T07:58:28.289Z" }, + { url = "https://files.pythonhosted.org/packages/42/f3/76534f61f80d74cc9cdf2e570d3d4eeb92c2280a27c39b0aaf471eda7b48/numpy-2.4.3-cp314-cp314t-win_amd64.whl", hash = "sha256:45f003dbdffb997a03da2d1d0cb41fbd24a87507fb41605c0420a3db5bd4667b", size = 12633645, upload-time = "2026-03-09T07:58:30.384Z" }, + { url = "https://files.pythonhosted.org/packages/1f/b6/7c0d4334c15983cec7f92a69e8ce9b1e6f31857e5ee3a413ac424e6bd63d/numpy-2.4.3-cp314-cp314t-win_arm64.whl", hash = "sha256:4d382735cecd7bcf090172489a525cd7d4087bc331f7df9f60ddc9a296cf208e", size = 10565454, upload-time = "2026-03-09T07:58:33.031Z" }, +] + +[[package]] +name = "packaging" +version = "26.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = 
"https://files.pythonhosted.org/packages/65/ee/299d360cdc32edc7d2cf530f3accf79c4fca01e96ffc950d8a52213bd8e4/packaging-26.0.tar.gz", hash = "sha256:00243ae351a257117b6a241061796684b084ed1c516a08c48a3f7e147a9d80b4", size = 143416, upload-time = "2026-01-21T20:50:39.064Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b7/b9/c538f279a4e237a006a2c98387d081e9eb060d203d8ed34467cc0f0b9b53/packaging-26.0-py3-none-any.whl", hash = "sha256:b36f1fef9334a5588b4166f8bcd26a14e521f2b55e6b9de3aaa80d3ff7a37529", size = 74366, upload-time = "2026-01-21T20:50:37.788Z" }, +] + +[[package]] +name = "pandas" +version = "3.0.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "numpy" }, + { name = "python-dateutil" }, + { name = "tzdata", marker = "sys_platform == 'emscripten' or sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/2e/0c/b28ed414f080ee0ad153f848586d61d1878f91689950f037f976ce15f6c8/pandas-3.0.1.tar.gz", hash = "sha256:4186a699674af418f655dbd420ed87f50d56b4cd6603784279d9eef6627823c8", size = 4641901, upload-time = "2026-02-17T22:20:16.434Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/37/51/b467209c08dae2c624873d7491ea47d2b47336e5403309d433ea79c38571/pandas-3.0.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:476f84f8c20c9f5bc47252b66b4bb25e1a9fc2fa98cead96744d8116cb85771d", size = 10344357, upload-time = "2026-02-17T22:18:38.262Z" }, + { url = "https://files.pythonhosted.org/packages/7c/f1/e2567ffc8951ab371db2e40b2fe068e36b81d8cf3260f06ae508700e5504/pandas-3.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0ab749dfba921edf641d4036c4c21c0b3ea70fea478165cb98a998fb2a261955", size = 9884543, upload-time = "2026-02-17T22:18:41.476Z" }, + { url = "https://files.pythonhosted.org/packages/d7/39/327802e0b6d693182403c144edacbc27eb82907b57062f23ef5a4c4a5ea7/pandas-3.0.1-cp312-cp312-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = 
"sha256:b8e36891080b87823aff3640c78649b91b8ff6eea3c0d70aeabd72ea43ab069b", size = 10396030, upload-time = "2026-02-17T22:18:43.822Z" }, + { url = "https://files.pythonhosted.org/packages/3d/fe/89d77e424365280b79d99b3e1e7d606f5165af2f2ecfaf0c6d24c799d607/pandas-3.0.1-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:532527a701281b9dd371e2f582ed9094f4c12dd9ffb82c0c54ee28d8ac9520c4", size = 10876435, upload-time = "2026-02-17T22:18:45.954Z" }, + { url = "https://files.pythonhosted.org/packages/b5/a6/2a75320849dd154a793f69c951db759aedb8d1dd3939eeacda9bdcfa1629/pandas-3.0.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:356e5c055ed9b0da1580d465657bc7d00635af4fd47f30afb23025352ba764d1", size = 11405133, upload-time = "2026-02-17T22:18:48.533Z" }, + { url = "https://files.pythonhosted.org/packages/58/53/1d68fafb2e02d7881df66aa53be4cd748d25cbe311f3b3c85c93ea5d30ca/pandas-3.0.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:9d810036895f9ad6345b8f2a338dd6998a74e8483847403582cab67745bff821", size = 11932065, upload-time = "2026-02-17T22:18:50.837Z" }, + { url = "https://files.pythonhosted.org/packages/75/08/67cc404b3a966b6df27b38370ddd96b3b023030b572283d035181854aac5/pandas-3.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:536232a5fe26dd989bd633e7a0c450705fdc86a207fec7254a55e9a22950fe43", size = 9741627, upload-time = "2026-02-17T22:18:53.905Z" }, + { url = "https://files.pythonhosted.org/packages/86/4f/caf9952948fb00d23795f09b893d11f1cacb384e666854d87249530f7cbe/pandas-3.0.1-cp312-cp312-win_arm64.whl", hash = "sha256:0f463ebfd8de7f326d38037c7363c6dacb857c5881ab8961fb387804d6daf2f7", size = 9052483, upload-time = "2026-02-17T22:18:57.31Z" }, + { url = "https://files.pythonhosted.org/packages/0b/48/aad6ec4f8d007534c091e9a7172b3ec1b1ee6d99a9cbb936b5eab6c6cf58/pandas-3.0.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:5272627187b5d9c20e55d27caf5f2cd23e286aba25cadf73c8590e432e2b7262", size = 10317509, upload-time = 
"2026-02-17T22:18:59.498Z" }, + { url = "https://files.pythonhosted.org/packages/a8/14/5990826f779f79148ae9d3a2c39593dc04d61d5d90541e71b5749f35af95/pandas-3.0.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:661e0f665932af88c7877f31da0dc743fe9c8f2524bdffe23d24fdcb67ef9d56", size = 9860561, upload-time = "2026-02-17T22:19:02.265Z" }, + { url = "https://files.pythonhosted.org/packages/fa/80/f01ff54664b6d70fed71475543d108a9b7c888e923ad210795bef04ffb7d/pandas-3.0.1-cp313-cp313-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:75e6e292ff898679e47a2199172593d9f6107fd2dd3617c22c2946e97d5df46e", size = 10365506, upload-time = "2026-02-17T22:19:05.017Z" }, + { url = "https://files.pythonhosted.org/packages/f2/85/ab6d04733a7d6ff32bfc8382bf1b07078228f5d6ebec5266b91bfc5c4ff7/pandas-3.0.1-cp313-cp313-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1ff8cf1d2896e34343197685f432450ec99a85ba8d90cce2030c5eee2ef98791", size = 10873196, upload-time = "2026-02-17T22:19:07.204Z" }, + { url = "https://files.pythonhosted.org/packages/48/a9/9301c83d0b47c23ac5deab91c6b39fd98d5b5db4d93b25df8d381451828f/pandas-3.0.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:eca8b4510f6763f3d37359c2105df03a7a221a508f30e396a51d0713d462e68a", size = 11370859, upload-time = "2026-02-17T22:19:09.436Z" }, + { url = "https://files.pythonhosted.org/packages/59/fe/0c1fc5bd2d29c7db2ab372330063ad555fb83e08422829c785f5ec2176ca/pandas-3.0.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:06aff2ad6f0b94a17822cf8b83bbb563b090ed82ff4fe7712db2ce57cd50d9b8", size = 11924584, upload-time = "2026-02-17T22:19:11.562Z" }, + { url = "https://files.pythonhosted.org/packages/d6/7d/216a1588b65a7aa5f4535570418a599d943c85afb1d95b0876fc00aa1468/pandas-3.0.1-cp313-cp313-win_amd64.whl", hash = "sha256:9fea306c783e28884c29057a1d9baa11a349bbf99538ec1da44c8476563d1b25", size = 9742769, upload-time = "2026-02-17T22:19:13.926Z" }, + { url = 
"https://files.pythonhosted.org/packages/c4/cb/810a22a6af9a4e97c8ab1c946b47f3489c5bca5adc483ce0ffc84c9cc768/pandas-3.0.1-cp313-cp313-win_arm64.whl", hash = "sha256:a8d37a43c52917427e897cb2e429f67a449327394396a81034a4449b99afda59", size = 9043855, upload-time = "2026-02-17T22:19:16.09Z" }, + { url = "https://files.pythonhosted.org/packages/92/fa/423c89086cca1f039cf1253c3ff5b90f157b5b3757314aa635f6bf3e30aa/pandas-3.0.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:d54855f04f8246ed7b6fc96b05d4871591143c46c0b6f4af874764ed0d2d6f06", size = 10752673, upload-time = "2026-02-17T22:19:18.304Z" }, + { url = "https://files.pythonhosted.org/packages/22/23/b5a08ec1f40020397f0faba72f1e2c11f7596a6169c7b3e800abff0e433f/pandas-3.0.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:4e1b677accee34a09e0dc2ce5624e4a58a1870ffe56fc021e9caf7f23cd7668f", size = 10404967, upload-time = "2026-02-17T22:19:20.726Z" }, + { url = "https://files.pythonhosted.org/packages/5c/81/94841f1bb4afdc2b52a99daa895ac2c61600bb72e26525ecc9543d453ebc/pandas-3.0.1-cp313-cp313t-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a9cabbdcd03f1b6cd254d6dda8ae09b0252524be1592594c00b7895916cb1324", size = 10320575, upload-time = "2026-02-17T22:19:24.919Z" }, + { url = "https://files.pythonhosted.org/packages/0a/8b/2ae37d66a5342a83adadfd0cb0b4bf9c3c7925424dd5f40d15d6cfaa35ee/pandas-3.0.1-cp313-cp313t-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5ae2ab1f166668b41e770650101e7090824fd34d17915dd9cd479f5c5e0065e9", size = 10710921, upload-time = "2026-02-17T22:19:27.181Z" }, + { url = "https://files.pythonhosted.org/packages/a2/61/772b2e2757855e232b7ccf7cb8079a5711becb3a97f291c953def15a833f/pandas-3.0.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:6bf0603c2e30e2cafac32807b06435f28741135cb8697eae8b28c7d492fc7d76", size = 11334191, upload-time = "2026-02-17T22:19:29.411Z" }, + { url = 
"https://files.pythonhosted.org/packages/1b/08/b16c6df3ef555d8495d1d265a7963b65be166785d28f06a350913a4fac78/pandas-3.0.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:6c426422973973cae1f4a23e51d4ae85974f44871b24844e4f7de752dd877098", size = 11782256, upload-time = "2026-02-17T22:19:32.34Z" }, + { url = "https://files.pythonhosted.org/packages/55/80/178af0594890dee17e239fca96d3d8670ba0f5ff59b7d0439850924a9c09/pandas-3.0.1-cp313-cp313t-win_amd64.whl", hash = "sha256:b03f91ae8c10a85c1613102c7bef5229b5379f343030a3ccefeca8a33414cf35", size = 10485047, upload-time = "2026-02-17T22:19:34.605Z" }, + { url = "https://files.pythonhosted.org/packages/bb/8b/4bb774a998b97e6c2fd62a9e6cfdaae133b636fd1c468f92afb4ae9a447a/pandas-3.0.1-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:99d0f92ed92d3083d140bf6b97774f9f13863924cf3f52a70711f4e7588f9d0a", size = 10322465, upload-time = "2026-02-17T22:19:36.803Z" }, + { url = "https://files.pythonhosted.org/packages/72/3a/5b39b51c64159f470f1ca3b1c2a87da290657ca022f7cd11442606f607d1/pandas-3.0.1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:3b66857e983208654294bb6477b8a63dee26b37bdd0eb34d010556e91261784f", size = 9910632, upload-time = "2026-02-17T22:19:39.001Z" }, + { url = "https://files.pythonhosted.org/packages/4e/f7/b449ffb3f68c11da12fc06fbf6d2fa3a41c41e17d0284d23a79e1c13a7e4/pandas-3.0.1-cp314-cp314-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:56cf59638bf24dc9bdf2154c81e248b3289f9a09a6d04e63608c159022352749", size = 10440535, upload-time = "2026-02-17T22:19:41.157Z" }, + { url = "https://files.pythonhosted.org/packages/55/77/6ea82043db22cb0f2bbfe7198da3544000ddaadb12d26be36e19b03a2dc5/pandas-3.0.1-cp314-cp314-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c1a9f55e0f46951874b863d1f3906dcb57df2d9be5c5847ba4dfb55b2c815249", size = 10893940, upload-time = "2026-02-17T22:19:43.493Z" }, + { url = 
"https://files.pythonhosted.org/packages/03/30/f1b502a72468c89412c1b882a08f6eed8a4ee9dc033f35f65d0663df6081/pandas-3.0.1-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:1849f0bba9c8a2fb0f691d492b834cc8dadf617e29015c66e989448d58d011ee", size = 11442711, upload-time = "2026-02-17T22:19:46.074Z" }, + { url = "https://files.pythonhosted.org/packages/0d/f0/ebb6ddd8fc049e98cabac5c2924d14d1dda26a20adb70d41ea2e428d3ec4/pandas-3.0.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:c3d288439e11b5325b02ae6e9cc83e6805a62c40c5a6220bea9beb899c073b1c", size = 11963918, upload-time = "2026-02-17T22:19:48.838Z" }, + { url = "https://files.pythonhosted.org/packages/09/f8/8ce132104074f977f907442790eaae24e27bce3b3b454e82faa3237ff098/pandas-3.0.1-cp314-cp314-win_amd64.whl", hash = "sha256:93325b0fe372d192965f4cca88d97667f49557398bbf94abdda3bf1b591dbe66", size = 9862099, upload-time = "2026-02-17T22:19:51.081Z" }, + { url = "https://files.pythonhosted.org/packages/e6/b7/6af9aac41ef2456b768ef0ae60acf8abcebb450a52043d030a65b4b7c9bd/pandas-3.0.1-cp314-cp314-win_arm64.whl", hash = "sha256:97ca08674e3287c7148f4858b01136f8bdfe7202ad25ad04fec602dd1d29d132", size = 9185333, upload-time = "2026-02-17T22:19:53.266Z" }, + { url = "https://files.pythonhosted.org/packages/66/fc/848bb6710bc6061cb0c5badd65b92ff75c81302e0e31e496d00029fe4953/pandas-3.0.1-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:58eeb1b2e0fb322befcf2bbc9ba0af41e616abadb3d3414a6bc7167f6cbfce32", size = 10772664, upload-time = "2026-02-17T22:19:55.806Z" }, + { url = "https://files.pythonhosted.org/packages/69/5c/866a9bbd0f79263b4b0db6ec1a341be13a1473323f05c122388e0f15b21d/pandas-3.0.1-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:cd9af1276b5ca9e298bd79a26bda32fa9cc87ed095b2a9a60978d2ca058eaf87", size = 10421286, upload-time = "2026-02-17T22:19:58.091Z" }, + { url = 
"https://files.pythonhosted.org/packages/51/a4/2058fb84fb1cfbfb2d4a6d485e1940bb4ad5716e539d779852494479c580/pandas-3.0.1-cp314-cp314t-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:94f87a04984d6b63788327cd9f79dda62b7f9043909d2440ceccf709249ca988", size = 10342050, upload-time = "2026-02-17T22:20:01.376Z" }, + { url = "https://files.pythonhosted.org/packages/22/1b/674e89996cc4be74db3c4eb09240c4bb549865c9c3f5d9b086ff8fcfbf00/pandas-3.0.1-cp314-cp314t-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:85fe4c4df62e1e20f9db6ebfb88c844b092c22cd5324bdcf94bfa2fc1b391221", size = 10740055, upload-time = "2026-02-17T22:20:04.328Z" }, + { url = "https://files.pythonhosted.org/packages/d0/f8/e954b750764298c22fa4614376531fe63c521ef517e7059a51f062b87dca/pandas-3.0.1-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:331ca75a2f8672c365ae25c0b29e46f5ac0c6551fdace8eec4cd65e4fac271ff", size = 11357632, upload-time = "2026-02-17T22:20:06.647Z" }, + { url = "https://files.pythonhosted.org/packages/6d/02/c6e04b694ffd68568297abd03588b6d30295265176a5c01b7459d3bc35a3/pandas-3.0.1-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:15860b1fdb1973fffade772fdb931ccf9b2f400a3f5665aef94a00445d7d8dd5", size = 11810974, upload-time = "2026-02-17T22:20:08.946Z" }, + { url = "https://files.pythonhosted.org/packages/89/41/d7dfb63d2407f12055215070c42fc6ac41b66e90a2946cdc5e759058398b/pandas-3.0.1-cp314-cp314t-win_amd64.whl", hash = "sha256:44f1364411d5670efa692b146c748f4ed013df91ee91e9bec5677fb1fd58b937", size = 10884622, upload-time = "2026-02-17T22:20:11.711Z" }, + { url = "https://files.pythonhosted.org/packages/68/b0/34937815889fa982613775e4b97fddd13250f11012d769949c5465af2150/pandas-3.0.1-cp314-cp314t-win_arm64.whl", hash = "sha256:108dd1790337a494aa80e38def654ca3f0968cf4f362c85f44c15e471667102d", size = 9452085, upload-time = "2026-02-17T22:20:14.331Z" }, +] + +[[package]] +name = "parsimonious" +version = "0.10.0" +source = { registry = 
"https://pypi.org/simple" } +dependencies = [ + { name = "regex" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/7b/91/abdc50c4ef06fdf8d047f60ee777ca9b2a7885e1a9cea81343fbecda52d7/parsimonious-0.10.0.tar.gz", hash = "sha256:8281600da180ec8ae35427a4ab4f7b82bfec1e3d1e52f80cb60ea82b9512501c", size = 52172, upload-time = "2022-09-03T17:01:17.004Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/aa/0f/c8b64d9b54ea631fcad4e9e3c8dbe8c11bb32a623be94f22974c88e71eaf/parsimonious-0.10.0-py3-none-any.whl", hash = "sha256:982ab435fabe86519b57f6b35610aa4e4e977e9f02a14353edf4bbc75369fc0f", size = 48427, upload-time = "2022-09-03T17:01:13.814Z" }, +] + +[[package]] +name = "perp-bot" +version = "0.1.0" +source = { editable = "." } +dependencies = [ + { name = "eth-account" }, + { name = "hyperliquid-python-sdk" }, + { name = "numpy" }, + { name = "pandas" }, + { name = "python-dotenv" }, + { name = "pyyaml" }, + { name = "textual" }, + { name = "websockets" }, +] + +[package.optional-dependencies] +dev = [ + { name = "pytest" }, + { name = "pytest-asyncio" }, + { name = "ruff" }, +] + +[package.dev-dependencies] +dev = [ + { name = "pytest" }, + { name = "pytest-asyncio" }, + { name = "ruff" }, +] + +[package.metadata] +requires-dist = [ + { name = "eth-account" }, + { name = "hyperliquid-python-sdk" }, + { name = "numpy" }, + { name = "pandas" }, + { name = "pytest", marker = "extra == 'dev'" }, + { name = "pytest-asyncio", marker = "extra == 'dev'" }, + { name = "python-dotenv" }, + { name = "pyyaml" }, + { name = "ruff", marker = "extra == 'dev'" }, + { name = "textual", specifier = ">=1.0.0" }, + { name = "websockets" }, +] +provides-extras = ["dev"] + +[package.metadata.requires-dev] +dev = [ + { name = "pytest", specifier = ">=9.0.2" }, + { name = "pytest-asyncio", specifier = ">=1.3.0" }, + { name = "ruff", specifier = ">=0.15.6" }, +] + +[[package]] +name = "platformdirs" +version = "4.9.4" +source = { registry = "https://pypi.org/simple" } 
+sdist = { url = "https://files.pythonhosted.org/packages/19/56/8d4c30c8a1d07013911a8fdbd8f89440ef9f08d07a1b50ab8ca8be5a20f9/platformdirs-4.9.4.tar.gz", hash = "sha256:1ec356301b7dc906d83f371c8f487070e99d3ccf9e501686456394622a01a934", size = 28737, upload-time = "2026-03-05T18:34:13.271Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/63/d7/97f7e3a6abb67d8080dd406fd4df842c2be0efaf712d1c899c32a075027c/platformdirs-4.9.4-py3-none-any.whl", hash = "sha256:68a9a4619a666ea6439f2ff250c12a853cd1cbd5158d258bd824a7df6be2f868", size = 21216, upload-time = "2026-03-05T18:34:12.172Z" }, +] + +[[package]] +name = "pluggy" +version = "1.6.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f9/e2/3e91f31a7d2b083fe6ef3fa267035b518369d9511ffab804f839851d2779/pluggy-1.6.0.tar.gz", hash = "sha256:7dcc130b76258d33b90f61b658791dede3486c3e6bfb003ee5c9bfb396dd22f3", size = 69412, upload-time = "2025-05-15T12:30:07.975Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/54/20/4d324d65cc6d9205fabedc306948156824eb9f0ee1633355a8f7ec5c66bf/pluggy-1.6.0-py3-none-any.whl", hash = "sha256:e920276dd6813095e9377c0bc5566d94c932c33b27a3e3945d8389c374dd4746", size = 20538, upload-time = "2025-05-15T12:30:06.134Z" }, +] + +[[package]] +name = "pycryptodome" +version = "3.23.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/8e/a6/8452177684d5e906854776276ddd34eca30d1b1e15aa1ee9cefc289a33f5/pycryptodome-3.23.0.tar.gz", hash = "sha256:447700a657182d60338bab09fdb27518f8856aecd80ae4c6bdddb67ff5da44ef", size = 4921276, upload-time = "2025-05-17T17:21:45.242Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/04/5d/bdb09489b63cd34a976cc9e2a8d938114f7a53a74d3dd4f125ffa49dce82/pycryptodome-3.23.0-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:0011f7f00cdb74879142011f95133274741778abba114ceca229adbf8e62c3e4", size = 2495152, 
upload-time = "2025-05-17T17:20:20.833Z" }, + { url = "https://files.pythonhosted.org/packages/a7/ce/7840250ed4cc0039c433cd41715536f926d6e86ce84e904068eb3244b6a6/pycryptodome-3.23.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:90460fc9e088ce095f9ee8356722d4f10f86e5be06e2354230a9880b9c549aae", size = 1639348, upload-time = "2025-05-17T17:20:23.171Z" }, + { url = "https://files.pythonhosted.org/packages/ee/f0/991da24c55c1f688d6a3b5a11940567353f74590734ee4a64294834ae472/pycryptodome-3.23.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4764e64b269fc83b00f682c47443c2e6e85b18273712b98aa43bcb77f8570477", size = 2184033, upload-time = "2025-05-17T17:20:25.424Z" }, + { url = "https://files.pythonhosted.org/packages/54/16/0e11882deddf00f68b68dd4e8e442ddc30641f31afeb2bc25588124ac8de/pycryptodome-3.23.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eb8f24adb74984aa0e5d07a2368ad95276cf38051fe2dc6605cbcf482e04f2a7", size = 2270142, upload-time = "2025-05-17T17:20:27.808Z" }, + { url = "https://files.pythonhosted.org/packages/d5/fc/4347fea23a3f95ffb931f383ff28b3f7b1fe868739182cb76718c0da86a1/pycryptodome-3.23.0-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d97618c9c6684a97ef7637ba43bdf6663a2e2e77efe0f863cce97a76af396446", size = 2309384, upload-time = "2025-05-17T17:20:30.765Z" }, + { url = "https://files.pythonhosted.org/packages/6e/d9/c5261780b69ce66d8cfab25d2797bd6e82ba0241804694cd48be41add5eb/pycryptodome-3.23.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:9a53a4fe5cb075075d515797d6ce2f56772ea7e6a1e5e4b96cf78a14bac3d265", size = 2183237, upload-time = "2025-05-17T17:20:33.736Z" }, + { url = "https://files.pythonhosted.org/packages/5a/6f/3af2ffedd5cfa08c631f89452c6648c4d779e7772dfc388c77c920ca6bbf/pycryptodome-3.23.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = 
"sha256:763d1d74f56f031788e5d307029caef067febf890cd1f8bf61183ae142f1a77b", size = 2343898, upload-time = "2025-05-17T17:20:36.086Z" }, + { url = "https://files.pythonhosted.org/packages/9a/dc/9060d807039ee5de6e2f260f72f3d70ac213993a804f5e67e0a73a56dd2f/pycryptodome-3.23.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:954af0e2bd7cea83ce72243b14e4fb518b18f0c1649b576d114973e2073b273d", size = 2269197, upload-time = "2025-05-17T17:20:38.414Z" }, + { url = "https://files.pythonhosted.org/packages/f9/34/e6c8ca177cb29dcc4967fef73f5de445912f93bd0343c9c33c8e5bf8cde8/pycryptodome-3.23.0-cp313-cp313t-win32.whl", hash = "sha256:257bb3572c63ad8ba40b89f6fc9d63a2a628e9f9708d31ee26560925ebe0210a", size = 1768600, upload-time = "2025-05-17T17:20:40.688Z" }, + { url = "https://files.pythonhosted.org/packages/e4/1d/89756b8d7ff623ad0160f4539da571d1f594d21ee6d68be130a6eccb39a4/pycryptodome-3.23.0-cp313-cp313t-win_amd64.whl", hash = "sha256:6501790c5b62a29fcb227bd6b62012181d886a767ce9ed03b303d1f22eb5c625", size = 1799740, upload-time = "2025-05-17T17:20:42.413Z" }, + { url = "https://files.pythonhosted.org/packages/5d/61/35a64f0feaea9fd07f0d91209e7be91726eb48c0f1bfc6720647194071e4/pycryptodome-3.23.0-cp313-cp313t-win_arm64.whl", hash = "sha256:9a77627a330ab23ca43b48b130e202582e91cc69619947840ea4d2d1be21eb39", size = 1703685, upload-time = "2025-05-17T17:20:44.388Z" }, + { url = "https://files.pythonhosted.org/packages/db/6c/a1f71542c969912bb0e106f64f60a56cc1f0fabecf9396f45accbe63fa68/pycryptodome-3.23.0-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:187058ab80b3281b1de11c2e6842a357a1f71b42cb1e15bce373f3d238135c27", size = 2495627, upload-time = "2025-05-17T17:20:47.139Z" }, + { url = "https://files.pythonhosted.org/packages/6e/4e/a066527e079fc5002390c8acdd3aca431e6ea0a50ffd7201551175b47323/pycryptodome-3.23.0-cp37-abi3-macosx_10_9_x86_64.whl", hash = "sha256:cfb5cd445280c5b0a4e6187a7ce8de5a07b5f3f897f235caa11f1f435f182843", size = 1640362, upload-time = 
"2025-05-17T17:20:50.392Z" }, + { url = "https://files.pythonhosted.org/packages/50/52/adaf4c8c100a8c49d2bd058e5b551f73dfd8cb89eb4911e25a0c469b6b4e/pycryptodome-3.23.0-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:67bd81fcbe34f43ad9422ee8fd4843c8e7198dd88dd3d40e6de42ee65fbe1490", size = 2182625, upload-time = "2025-05-17T17:20:52.866Z" }, + { url = "https://files.pythonhosted.org/packages/5f/e9/a09476d436d0ff1402ac3867d933c61805ec2326c6ea557aeeac3825604e/pycryptodome-3.23.0-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c8987bd3307a39bc03df5c8e0e3d8be0c4c3518b7f044b0f4c15d1aa78f52575", size = 2268954, upload-time = "2025-05-17T17:20:55.027Z" }, + { url = "https://files.pythonhosted.org/packages/f9/c5/ffe6474e0c551d54cab931918127c46d70cab8f114e0c2b5a3c071c2f484/pycryptodome-3.23.0-cp37-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:aa0698f65e5b570426fc31b8162ed4603b0c2841cbb9088e2b01641e3065915b", size = 2308534, upload-time = "2025-05-17T17:20:57.279Z" }, + { url = "https://files.pythonhosted.org/packages/18/28/e199677fc15ecf43010f2463fde4c1a53015d1fe95fb03bca2890836603a/pycryptodome-3.23.0-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:53ecbafc2b55353edcebd64bf5da94a2a2cdf5090a6915bcca6eca6cc452585a", size = 2181853, upload-time = "2025-05-17T17:20:59.322Z" }, + { url = "https://files.pythonhosted.org/packages/ce/ea/4fdb09f2165ce1365c9eaefef36625583371ee514db58dc9b65d3a255c4c/pycryptodome-3.23.0-cp37-abi3-musllinux_1_2_i686.whl", hash = "sha256:156df9667ad9f2ad26255926524e1c136d6664b741547deb0a86a9acf5ea631f", size = 2342465, upload-time = "2025-05-17T17:21:03.83Z" }, + { url = "https://files.pythonhosted.org/packages/22/82/6edc3fc42fe9284aead511394bac167693fb2b0e0395b28b8bedaa07ef04/pycryptodome-3.23.0-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:dea827b4d55ee390dc89b2afe5927d4308a8b538ae91d9c6f7a5090f397af1aa", size = 2267414, upload-time = 
"2025-05-17T17:21:06.72Z" }, + { url = "https://files.pythonhosted.org/packages/59/fe/aae679b64363eb78326c7fdc9d06ec3de18bac68be4b612fc1fe8902693c/pycryptodome-3.23.0-cp37-abi3-win32.whl", hash = "sha256:507dbead45474b62b2bbe318eb1c4c8ee641077532067fec9c1aa82c31f84886", size = 1768484, upload-time = "2025-05-17T17:21:08.535Z" }, + { url = "https://files.pythonhosted.org/packages/54/2f/e97a1b8294db0daaa87012c24a7bb714147c7ade7656973fd6c736b484ff/pycryptodome-3.23.0-cp37-abi3-win_amd64.whl", hash = "sha256:c75b52aacc6c0c260f204cbdd834f76edc9fb0d8e0da9fbf8352ef58202564e2", size = 1799636, upload-time = "2025-05-17T17:21:10.393Z" }, + { url = "https://files.pythonhosted.org/packages/18/3d/f9441a0d798bf2b1e645adc3265e55706aead1255ccdad3856dbdcffec14/pycryptodome-3.23.0-cp37-abi3-win_arm64.whl", hash = "sha256:11eeeb6917903876f134b56ba11abe95c0b0fd5e3330def218083c7d98bbcb3c", size = 1703675, upload-time = "2025-05-17T17:21:13.146Z" }, +] + +[[package]] +name = "pydantic" +version = "2.12.5" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "annotated-types" }, + { name = "pydantic-core" }, + { name = "typing-extensions" }, + { name = "typing-inspection" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/69/44/36f1a6e523abc58ae5f928898e4aca2e0ea509b5aa6f6f392a5d882be928/pydantic-2.12.5.tar.gz", hash = "sha256:4d351024c75c0f085a9febbb665ce8c0c6ec5d30e903bdb6394b7ede26aebb49", size = 821591, upload-time = "2025-11-26T15:11:46.471Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/5a/87/b70ad306ebb6f9b585f114d0ac2137d792b48be34d732d60e597c2f8465a/pydantic-2.12.5-py3-none-any.whl", hash = "sha256:e561593fccf61e8a20fc46dfc2dfe075b8be7d0188df33f221ad1f0139180f9d", size = 463580, upload-time = "2025-11-26T15:11:44.605Z" }, +] + +[[package]] +name = "pydantic-core" +version = "2.41.5" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "typing-extensions" }, +] +sdist = { url = 
"https://files.pythonhosted.org/packages/71/70/23b021c950c2addd24ec408e9ab05d59b035b39d97cdc1130e1bce647bb6/pydantic_core-2.41.5.tar.gz", hash = "sha256:08daa51ea16ad373ffd5e7606252cc32f07bc72b28284b6bc9c6df804816476e", size = 460952, upload-time = "2025-11-04T13:43:49.098Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/5f/5d/5f6c63eebb5afee93bcaae4ce9a898f3373ca23df3ccaef086d0233a35a7/pydantic_core-2.41.5-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:f41a7489d32336dbf2199c8c0a215390a751c5b014c2c1c5366e817202e9cdf7", size = 2110990, upload-time = "2025-11-04T13:39:58.079Z" }, + { url = "https://files.pythonhosted.org/packages/aa/32/9c2e8ccb57c01111e0fd091f236c7b371c1bccea0fa85247ac55b1e2b6b6/pydantic_core-2.41.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:070259a8818988b9a84a449a2a7337c7f430a22acc0859c6b110aa7212a6d9c0", size = 1896003, upload-time = "2025-11-04T13:39:59.956Z" }, + { url = "https://files.pythonhosted.org/packages/68/b8/a01b53cb0e59139fbc9e4fda3e9724ede8de279097179be4ff31f1abb65a/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e96cea19e34778f8d59fe40775a7a574d95816eb150850a85a7a4c8f4b94ac69", size = 1919200, upload-time = "2025-11-04T13:40:02.241Z" }, + { url = "https://files.pythonhosted.org/packages/38/de/8c36b5198a29bdaade07b5985e80a233a5ac27137846f3bc2d3b40a47360/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ed2e99c456e3fadd05c991f8f437ef902e00eedf34320ba2b0842bd1c3ca3a75", size = 2052578, upload-time = "2025-11-04T13:40:04.401Z" }, + { url = "https://files.pythonhosted.org/packages/00/b5/0e8e4b5b081eac6cb3dbb7e60a65907549a1ce035a724368c330112adfdd/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:65840751b72fbfd82c3c640cff9284545342a4f1eb1586ad0636955b261b0b05", size = 2208504, upload-time = "2025-11-04T13:40:06.072Z" }, + { url = 
"https://files.pythonhosted.org/packages/77/56/87a61aad59c7c5b9dc8caad5a41a5545cba3810c3e828708b3d7404f6cef/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e536c98a7626a98feb2d3eaf75944ef6f3dbee447e1f841eae16f2f0a72d8ddc", size = 2335816, upload-time = "2025-11-04T13:40:07.835Z" }, + { url = "https://files.pythonhosted.org/packages/0d/76/941cc9f73529988688a665a5c0ecff1112b3d95ab48f81db5f7606f522d3/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eceb81a8d74f9267ef4081e246ffd6d129da5d87e37a77c9bde550cb04870c1c", size = 2075366, upload-time = "2025-11-04T13:40:09.804Z" }, + { url = "https://files.pythonhosted.org/packages/d3/43/ebef01f69baa07a482844faaa0a591bad1ef129253ffd0cdaa9d8a7f72d3/pydantic_core-2.41.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d38548150c39b74aeeb0ce8ee1d8e82696f4a4e16ddc6de7b1d8823f7de4b9b5", size = 2171698, upload-time = "2025-11-04T13:40:12.004Z" }, + { url = "https://files.pythonhosted.org/packages/b1/87/41f3202e4193e3bacfc2c065fab7706ebe81af46a83d3e27605029c1f5a6/pydantic_core-2.41.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:c23e27686783f60290e36827f9c626e63154b82b116d7fe9adba1fda36da706c", size = 2132603, upload-time = "2025-11-04T13:40:13.868Z" }, + { url = "https://files.pythonhosted.org/packages/49/7d/4c00df99cb12070b6bccdef4a195255e6020a550d572768d92cc54dba91a/pydantic_core-2.41.5-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:482c982f814460eabe1d3bb0adfdc583387bd4691ef00b90575ca0d2b6fe2294", size = 2329591, upload-time = "2025-11-04T13:40:15.672Z" }, + { url = "https://files.pythonhosted.org/packages/cc/6a/ebf4b1d65d458f3cda6a7335d141305dfa19bdc61140a884d165a8a1bbc7/pydantic_core-2.41.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:bfea2a5f0b4d8d43adf9d7b8bf019fb46fdd10a2e5cde477fbcb9d1fa08c68e1", size = 2319068, upload-time = "2025-11-04T13:40:17.532Z" }, + { url = 
"https://files.pythonhosted.org/packages/49/3b/774f2b5cd4192d5ab75870ce4381fd89cf218af999515baf07e7206753f0/pydantic_core-2.41.5-cp312-cp312-win32.whl", hash = "sha256:b74557b16e390ec12dca509bce9264c3bbd128f8a2c376eaa68003d7f327276d", size = 1985908, upload-time = "2025-11-04T13:40:19.309Z" }, + { url = "https://files.pythonhosted.org/packages/86/45/00173a033c801cacf67c190fef088789394feaf88a98a7035b0e40d53dc9/pydantic_core-2.41.5-cp312-cp312-win_amd64.whl", hash = "sha256:1962293292865bca8e54702b08a4f26da73adc83dd1fcf26fbc875b35d81c815", size = 2020145, upload-time = "2025-11-04T13:40:21.548Z" }, + { url = "https://files.pythonhosted.org/packages/f9/22/91fbc821fa6d261b376a3f73809f907cec5ca6025642c463d3488aad22fb/pydantic_core-2.41.5-cp312-cp312-win_arm64.whl", hash = "sha256:1746d4a3d9a794cacae06a5eaaccb4b8643a131d45fbc9af23e353dc0a5ba5c3", size = 1976179, upload-time = "2025-11-04T13:40:23.393Z" }, + { url = "https://files.pythonhosted.org/packages/87/06/8806241ff1f70d9939f9af039c6c35f2360cf16e93c2ca76f184e76b1564/pydantic_core-2.41.5-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:941103c9be18ac8daf7b7adca8228f8ed6bb7a1849020f643b3a14d15b1924d9", size = 2120403, upload-time = "2025-11-04T13:40:25.248Z" }, + { url = "https://files.pythonhosted.org/packages/94/02/abfa0e0bda67faa65fef1c84971c7e45928e108fe24333c81f3bfe35d5f5/pydantic_core-2.41.5-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:112e305c3314f40c93998e567879e887a3160bb8689ef3d2c04b6cc62c33ac34", size = 1896206, upload-time = "2025-11-04T13:40:27.099Z" }, + { url = "https://files.pythonhosted.org/packages/15/df/a4c740c0943e93e6500f9eb23f4ca7ec9bf71b19e608ae5b579678c8d02f/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0cbaad15cb0c90aa221d43c00e77bb33c93e8d36e0bf74760cd00e732d10a6a0", size = 1919307, upload-time = "2025-11-04T13:40:29.806Z" }, + { url = 
"https://files.pythonhosted.org/packages/9a/e3/6324802931ae1d123528988e0e86587c2072ac2e5394b4bc2bc34b61ff6e/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:03ca43e12fab6023fc79d28ca6b39b05f794ad08ec2feccc59a339b02f2b3d33", size = 2063258, upload-time = "2025-11-04T13:40:33.544Z" }, + { url = "https://files.pythonhosted.org/packages/c9/d4/2230d7151d4957dd79c3044ea26346c148c98fbf0ee6ebd41056f2d62ab5/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dc799088c08fa04e43144b164feb0c13f9a0bc40503f8df3e9fde58a3c0c101e", size = 2214917, upload-time = "2025-11-04T13:40:35.479Z" }, + { url = "https://files.pythonhosted.org/packages/e6/9f/eaac5df17a3672fef0081b6c1bb0b82b33ee89aa5cec0d7b05f52fd4a1fa/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:97aeba56665b4c3235a0e52b2c2f5ae9cd071b8a8310ad27bddb3f7fb30e9aa2", size = 2332186, upload-time = "2025-11-04T13:40:37.436Z" }, + { url = "https://files.pythonhosted.org/packages/cf/4e/35a80cae583a37cf15604b44240e45c05e04e86f9cfd766623149297e971/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:406bf18d345822d6c21366031003612b9c77b3e29ffdb0f612367352aab7d586", size = 2073164, upload-time = "2025-11-04T13:40:40.289Z" }, + { url = "https://files.pythonhosted.org/packages/bf/e3/f6e262673c6140dd3305d144d032f7bd5f7497d3871c1428521f19f9efa2/pydantic_core-2.41.5-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b93590ae81f7010dbe380cdeab6f515902ebcbefe0b9327cc4804d74e93ae69d", size = 2179146, upload-time = "2025-11-04T13:40:42.809Z" }, + { url = "https://files.pythonhosted.org/packages/75/c7/20bd7fc05f0c6ea2056a4565c6f36f8968c0924f19b7d97bbfea55780e73/pydantic_core-2.41.5-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:01a3d0ab748ee531f4ea6c3e48ad9dac84ddba4b0d82291f87248f2f9de8d740", size = 2137788, upload-time = 
"2025-11-04T13:40:44.752Z" }, + { url = "https://files.pythonhosted.org/packages/3a/8d/34318ef985c45196e004bc46c6eab2eda437e744c124ef0dbe1ff2c9d06b/pydantic_core-2.41.5-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:6561e94ba9dacc9c61bce40e2d6bdc3bfaa0259d3ff36ace3b1e6901936d2e3e", size = 2340133, upload-time = "2025-11-04T13:40:46.66Z" }, + { url = "https://files.pythonhosted.org/packages/9c/59/013626bf8c78a5a5d9350d12e7697d3d4de951a75565496abd40ccd46bee/pydantic_core-2.41.5-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:915c3d10f81bec3a74fbd4faebe8391013ba61e5a1a8d48c4455b923bdda7858", size = 2324852, upload-time = "2025-11-04T13:40:48.575Z" }, + { url = "https://files.pythonhosted.org/packages/1a/d9/c248c103856f807ef70c18a4f986693a46a8ffe1602e5d361485da502d20/pydantic_core-2.41.5-cp313-cp313-win32.whl", hash = "sha256:650ae77860b45cfa6e2cdafc42618ceafab3a2d9a3811fcfbd3bbf8ac3c40d36", size = 1994679, upload-time = "2025-11-04T13:40:50.619Z" }, + { url = "https://files.pythonhosted.org/packages/9e/8b/341991b158ddab181cff136acd2552c9f35bd30380422a639c0671e99a91/pydantic_core-2.41.5-cp313-cp313-win_amd64.whl", hash = "sha256:79ec52ec461e99e13791ec6508c722742ad745571f234ea6255bed38c6480f11", size = 2019766, upload-time = "2025-11-04T13:40:52.631Z" }, + { url = "https://files.pythonhosted.org/packages/73/7d/f2f9db34af103bea3e09735bb40b021788a5e834c81eedb541991badf8f5/pydantic_core-2.41.5-cp313-cp313-win_arm64.whl", hash = "sha256:3f84d5c1b4ab906093bdc1ff10484838aca54ef08de4afa9de0f5f14d69639cd", size = 1981005, upload-time = "2025-11-04T13:40:54.734Z" }, + { url = "https://files.pythonhosted.org/packages/ea/28/46b7c5c9635ae96ea0fbb779e271a38129df2550f763937659ee6c5dbc65/pydantic_core-2.41.5-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:3f37a19d7ebcdd20b96485056ba9e8b304e27d9904d233d7b1015db320e51f0a", size = 2119622, upload-time = "2025-11-04T13:40:56.68Z" }, + { url = 
"https://files.pythonhosted.org/packages/74/1a/145646e5687e8d9a1e8d09acb278c8535ebe9e972e1f162ed338a622f193/pydantic_core-2.41.5-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:1d1d9764366c73f996edd17abb6d9d7649a7eb690006ab6adbda117717099b14", size = 1891725, upload-time = "2025-11-04T13:40:58.807Z" }, + { url = "https://files.pythonhosted.org/packages/23/04/e89c29e267b8060b40dca97bfc64a19b2a3cf99018167ea1677d96368273/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:25e1c2af0fce638d5f1988b686f3b3ea8cd7de5f244ca147c777769e798a9cd1", size = 1915040, upload-time = "2025-11-04T13:41:00.853Z" }, + { url = "https://files.pythonhosted.org/packages/84/a3/15a82ac7bd97992a82257f777b3583d3e84bdb06ba6858f745daa2ec8a85/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:506d766a8727beef16b7adaeb8ee6217c64fc813646b424d0804d67c16eddb66", size = 2063691, upload-time = "2025-11-04T13:41:03.504Z" }, + { url = "https://files.pythonhosted.org/packages/74/9b/0046701313c6ef08c0c1cf0e028c67c770a4e1275ca73131563c5f2a310a/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4819fa52133c9aa3c387b3328f25c1facc356491e6135b459f1de698ff64d869", size = 2213897, upload-time = "2025-11-04T13:41:05.804Z" }, + { url = "https://files.pythonhosted.org/packages/8a/cd/6bac76ecd1b27e75a95ca3a9a559c643b3afcd2dd62086d4b7a32a18b169/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2b761d210c9ea91feda40d25b4efe82a1707da2ef62901466a42492c028553a2", size = 2333302, upload-time = "2025-11-04T13:41:07.809Z" }, + { url = "https://files.pythonhosted.org/packages/4c/d2/ef2074dc020dd6e109611a8be4449b98cd25e1b9b8a303c2f0fca2f2bcf7/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:22f0fb8c1c583a3b6f24df2470833b40207e907b90c928cc8d3594b76f874375", size = 2064877, upload-time = 
"2025-11-04T13:41:09.827Z" }, + { url = "https://files.pythonhosted.org/packages/18/66/e9db17a9a763d72f03de903883c057b2592c09509ccfe468187f2a2eef29/pydantic_core-2.41.5-cp314-cp314-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2782c870e99878c634505236d81e5443092fba820f0373997ff75f90f68cd553", size = 2180680, upload-time = "2025-11-04T13:41:12.379Z" }, + { url = "https://files.pythonhosted.org/packages/d3/9e/3ce66cebb929f3ced22be85d4c2399b8e85b622db77dad36b73c5387f8f8/pydantic_core-2.41.5-cp314-cp314-musllinux_1_1_aarch64.whl", hash = "sha256:0177272f88ab8312479336e1d777f6b124537d47f2123f89cb37e0accea97f90", size = 2138960, upload-time = "2025-11-04T13:41:14.627Z" }, + { url = "https://files.pythonhosted.org/packages/a6/62/205a998f4327d2079326b01abee48e502ea739d174f0a89295c481a2272e/pydantic_core-2.41.5-cp314-cp314-musllinux_1_1_armv7l.whl", hash = "sha256:63510af5e38f8955b8ee5687740d6ebf7c2a0886d15a6d65c32814613681bc07", size = 2339102, upload-time = "2025-11-04T13:41:16.868Z" }, + { url = "https://files.pythonhosted.org/packages/3c/0d/f05e79471e889d74d3d88f5bd20d0ed189ad94c2423d81ff8d0000aab4ff/pydantic_core-2.41.5-cp314-cp314-musllinux_1_1_x86_64.whl", hash = "sha256:e56ba91f47764cc14f1daacd723e3e82d1a89d783f0f5afe9c364b8bb491ccdb", size = 2326039, upload-time = "2025-11-04T13:41:18.934Z" }, + { url = "https://files.pythonhosted.org/packages/ec/e1/e08a6208bb100da7e0c4b288eed624a703f4d129bde2da475721a80cab32/pydantic_core-2.41.5-cp314-cp314-win32.whl", hash = "sha256:aec5cf2fd867b4ff45b9959f8b20ea3993fc93e63c7363fe6851424c8a7e7c23", size = 1995126, upload-time = "2025-11-04T13:41:21.418Z" }, + { url = "https://files.pythonhosted.org/packages/48/5d/56ba7b24e9557f99c9237e29f5c09913c81eeb2f3217e40e922353668092/pydantic_core-2.41.5-cp314-cp314-win_amd64.whl", hash = "sha256:8e7c86f27c585ef37c35e56a96363ab8de4e549a95512445b85c96d3e2f7c1bf", size = 2015489, upload-time = "2025-11-04T13:41:24.076Z" }, + { url = 
"https://files.pythonhosted.org/packages/4e/bb/f7a190991ec9e3e0ba22e4993d8755bbc4a32925c0b5b42775c03e8148f9/pydantic_core-2.41.5-cp314-cp314-win_arm64.whl", hash = "sha256:e672ba74fbc2dc8eea59fb6d4aed6845e6905fc2a8afe93175d94a83ba2a01a0", size = 1977288, upload-time = "2025-11-04T13:41:26.33Z" }, + { url = "https://files.pythonhosted.org/packages/92/ed/77542d0c51538e32e15afe7899d79efce4b81eee631d99850edc2f5e9349/pydantic_core-2.41.5-cp314-cp314t-macosx_10_12_x86_64.whl", hash = "sha256:8566def80554c3faa0e65ac30ab0932b9e3a5cd7f8323764303d468e5c37595a", size = 2120255, upload-time = "2025-11-04T13:41:28.569Z" }, + { url = "https://files.pythonhosted.org/packages/bb/3d/6913dde84d5be21e284439676168b28d8bbba5600d838b9dca99de0fad71/pydantic_core-2.41.5-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:b80aa5095cd3109962a298ce14110ae16b8c1aece8b72f9dafe81cf597ad80b3", size = 1863760, upload-time = "2025-11-04T13:41:31.055Z" }, + { url = "https://files.pythonhosted.org/packages/5a/f0/e5e6b99d4191da102f2b0eb9687aaa7f5bea5d9964071a84effc3e40f997/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3006c3dd9ba34b0c094c544c6006cc79e87d8612999f1a5d43b769b89181f23c", size = 1878092, upload-time = "2025-11-04T13:41:33.21Z" }, + { url = "https://files.pythonhosted.org/packages/71/48/36fb760642d568925953bcc8116455513d6e34c4beaa37544118c36aba6d/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:72f6c8b11857a856bcfa48c86f5368439f74453563f951e473514579d44aa612", size = 2053385, upload-time = "2025-11-04T13:41:35.508Z" }, + { url = "https://files.pythonhosted.org/packages/20/25/92dc684dd8eb75a234bc1c764b4210cf2646479d54b47bf46061657292a8/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5cb1b2f9742240e4bb26b652a5aeb840aa4b417c7748b6f8387927bc6e45e40d", size = 2218832, upload-time = "2025-11-04T13:41:37.732Z" }, + { url = 
"https://files.pythonhosted.org/packages/e2/09/f53e0b05023d3e30357d82eb35835d0f6340ca344720a4599cd663dca599/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bd3d54f38609ff308209bd43acea66061494157703364ae40c951f83ba99a1a9", size = 2327585, upload-time = "2025-11-04T13:41:40Z" }, + { url = "https://files.pythonhosted.org/packages/aa/4e/2ae1aa85d6af35a39b236b1b1641de73f5a6ac4d5a7509f77b814885760c/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2ff4321e56e879ee8d2a879501c8e469414d948f4aba74a2d4593184eb326660", size = 2041078, upload-time = "2025-11-04T13:41:42.323Z" }, + { url = "https://files.pythonhosted.org/packages/cd/13/2e215f17f0ef326fc72afe94776edb77525142c693767fc347ed6288728d/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d0d2568a8c11bf8225044aa94409e21da0cb09dcdafe9ecd10250b2baad531a9", size = 2173914, upload-time = "2025-11-04T13:41:45.221Z" }, + { url = "https://files.pythonhosted.org/packages/02/7a/f999a6dcbcd0e5660bc348a3991c8915ce6599f4f2c6ac22f01d7a10816c/pydantic_core-2.41.5-cp314-cp314t-musllinux_1_1_aarch64.whl", hash = "sha256:a39455728aabd58ceabb03c90e12f71fd30fa69615760a075b9fec596456ccc3", size = 2129560, upload-time = "2025-11-04T13:41:47.474Z" }, + { url = "https://files.pythonhosted.org/packages/3a/b1/6c990ac65e3b4c079a4fb9f5b05f5b013afa0f4ed6780a3dd236d2cbdc64/pydantic_core-2.41.5-cp314-cp314t-musllinux_1_1_armv7l.whl", hash = "sha256:239edca560d05757817c13dc17c50766136d21f7cd0fac50295499ae24f90fdf", size = 2329244, upload-time = "2025-11-04T13:41:49.992Z" }, + { url = "https://files.pythonhosted.org/packages/d9/02/3c562f3a51afd4d88fff8dffb1771b30cfdfd79befd9883ee094f5b6c0d8/pydantic_core-2.41.5-cp314-cp314t-musllinux_1_1_x86_64.whl", hash = "sha256:2a5e06546e19f24c6a96a129142a75cee553cc018ffee48a460059b1185f4470", size = 2331955, upload-time = "2025-11-04T13:41:54.079Z" }, + { url = 
"https://files.pythonhosted.org/packages/5c/96/5fb7d8c3c17bc8c62fdb031c47d77a1af698f1d7a406b0f79aaa1338f9ad/pydantic_core-2.41.5-cp314-cp314t-win32.whl", hash = "sha256:b4ececa40ac28afa90871c2cc2b9ffd2ff0bf749380fbdf57d165fd23da353aa", size = 1988906, upload-time = "2025-11-04T13:41:56.606Z" }, + { url = "https://files.pythonhosted.org/packages/22/ed/182129d83032702912c2e2d8bbe33c036f342cc735737064668585dac28f/pydantic_core-2.41.5-cp314-cp314t-win_amd64.whl", hash = "sha256:80aa89cad80b32a912a65332f64a4450ed00966111b6615ca6816153d3585a8c", size = 1981607, upload-time = "2025-11-04T13:41:58.889Z" }, + { url = "https://files.pythonhosted.org/packages/9f/ed/068e41660b832bb0b1aa5b58011dea2a3fe0ba7861ff38c4d4904c1c1a99/pydantic_core-2.41.5-cp314-cp314t-win_arm64.whl", hash = "sha256:35b44f37a3199f771c3eaa53051bc8a70cd7b54f333531c59e29fd4db5d15008", size = 1974769, upload-time = "2025-11-04T13:42:01.186Z" }, + { url = "https://files.pythonhosted.org/packages/09/32/59b0c7e63e277fa7911c2fc70ccfb45ce4b98991e7ef37110663437005af/pydantic_core-2.41.5-graalpy312-graalpy250_312_native-macosx_10_12_x86_64.whl", hash = "sha256:7da7087d756b19037bc2c06edc6c170eeef3c3bafcb8f532ff17d64dc427adfd", size = 2110495, upload-time = "2025-11-04T13:42:49.689Z" }, + { url = "https://files.pythonhosted.org/packages/aa/81/05e400037eaf55ad400bcd318c05bb345b57e708887f07ddb2d20e3f0e98/pydantic_core-2.41.5-graalpy312-graalpy250_312_native-macosx_11_0_arm64.whl", hash = "sha256:aabf5777b5c8ca26f7824cb4a120a740c9588ed58df9b2d196ce92fba42ff8dc", size = 1915388, upload-time = "2025-11-04T13:42:52.215Z" }, + { url = "https://files.pythonhosted.org/packages/6e/0d/e3549b2399f71d56476b77dbf3cf8937cec5cd70536bdc0e374a421d0599/pydantic_core-2.41.5-graalpy312-graalpy250_312_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c007fe8a43d43b3969e8469004e9845944f1a80e6acd47c150856bb87f230c56", size = 1942879, upload-time = "2025-11-04T13:42:56.483Z" }, + { url = 
"https://files.pythonhosted.org/packages/f7/07/34573da085946b6a313d7c42f82f16e8920bfd730665de2d11c0c37a74b5/pydantic_core-2.41.5-graalpy312-graalpy250_312_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:76d0819de158cd855d1cbb8fcafdf6f5cf1eb8e470abe056d5d161106e38062b", size = 2139017, upload-time = "2025-11-04T13:42:59.471Z" }, +] + +[[package]] +name = "pygments" +version = "2.19.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/b0/77/a5b8c569bf593b0140bde72ea885a803b82086995367bf2037de0159d924/pygments-2.19.2.tar.gz", hash = "sha256:636cb2477cec7f8952536970bc533bc43743542f70392ae026374600add5b887", size = 4968631, upload-time = "2025-06-21T13:39:12.283Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c7/21/705964c7812476f378728bdf590ca4b771ec72385c533964653c68e86bdc/pygments-2.19.2-py3-none-any.whl", hash = "sha256:86540386c03d588bb81d44bc3928634ff26449851e99741617ecb9037ee5ec0b", size = 1225217, upload-time = "2025-06-21T13:39:07.939Z" }, +] + +[[package]] +name = "pytest" +version = "9.0.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "colorama", marker = "sys_platform == 'win32'" }, + { name = "iniconfig" }, + { name = "packaging" }, + { name = "pluggy" }, + { name = "pygments" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/d1/db/7ef3487e0fb0049ddb5ce41d3a49c235bf9ad299b6a25d5780a89f19230f/pytest-9.0.2.tar.gz", hash = "sha256:75186651a92bd89611d1d9fc20f0b4345fd827c41ccd5c299a868a05d70edf11", size = 1568901, upload-time = "2025-12-06T21:30:51.014Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/3b/ab/b3226f0bd7cdcf710fbede2b3548584366da3b19b5021e74f5bde2a8fa3f/pytest-9.0.2-py3-none-any.whl", hash = "sha256:711ffd45bf766d5264d487b917733b453d917afd2b0ad65223959f59089f875b", size = 374801, upload-time = "2025-12-06T21:30:49.154Z" }, +] + +[[package]] +name = "pytest-asyncio" +version = "1.3.0" 
+source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pytest" }, + { name = "typing-extensions", marker = "python_full_version < '3.13'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/90/2c/8af215c0f776415f3590cac4f9086ccefd6fd463befeae41cd4d3f193e5a/pytest_asyncio-1.3.0.tar.gz", hash = "sha256:d7f52f36d231b80ee124cd216ffb19369aa168fc10095013c6b014a34d3ee9e5", size = 50087, upload-time = "2025-11-10T16:07:47.256Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e5/35/f8b19922b6a25bc0880171a2f1a003eaeb93657475193ab516fd87cac9da/pytest_asyncio-1.3.0-py3-none-any.whl", hash = "sha256:611e26147c7f77640e6d0a92a38ed17c3e9848063698d5c93d5aa7aa11cebff5", size = 15075, upload-time = "2025-11-10T16:07:45.537Z" }, +] + +[[package]] +name = "python-dateutil" +version = "2.9.0.post0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "six" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/66/c0/0c8b6ad9f17a802ee498c46e004a0eb49bc148f2fd230864601a86dcf6db/python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3", size = 342432, upload-time = "2024-03-01T18:36:20.211Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ec/57/56b9bcc3c9c6a792fcbaf139543cee77261f3651ca9da0c93f5c1221264b/python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427", size = 229892, upload-time = "2024-03-01T18:36:18.57Z" }, +] + +[[package]] +name = "python-dotenv" +version = "1.2.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/82/ed/0301aeeac3e5353ef3d94b6ec08bbcabd04a72018415dcb29e588514bba8/python_dotenv-1.2.2.tar.gz", hash = "sha256:2c371a91fbd7ba082c2c1dc1f8bf89ca22564a087c2c287cd9b662adde799cf3", size = 50135, upload-time = "2026-03-01T16:00:26.196Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/0b/d7/1959b9648791274998a9c3526f6d0ec8fd2233e4d4acce81bbae76b44b2a/python_dotenv-1.2.2-py3-none-any.whl", hash = "sha256:1d8214789a24de455a8b8bd8ae6fe3c6b69a5e3d64aa8a8e5d68e694bbcb285a", size = 22101, upload-time = "2026-03-01T16:00:25.09Z" }, +] + +[[package]] +name = "pyyaml" +version = "6.0.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/05/8e/961c0007c59b8dd7729d542c61a4d537767a59645b82a0b521206e1e25c2/pyyaml-6.0.3.tar.gz", hash = "sha256:d76623373421df22fb4cf8817020cbb7ef15c725b9d5e45f17e189bfc384190f", size = 130960, upload-time = "2025-09-25T21:33:16.546Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d1/33/422b98d2195232ca1826284a76852ad5a86fe23e31b009c9886b2d0fb8b2/pyyaml-6.0.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:7f047e29dcae44602496db43be01ad42fc6f1cc0d8cd6c83d342306c32270196", size = 182063, upload-time = "2025-09-25T21:32:11.445Z" }, + { url = "https://files.pythonhosted.org/packages/89/a0/6cf41a19a1f2f3feab0e9c0b74134aa2ce6849093d5517a0c550fe37a648/pyyaml-6.0.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:fc09d0aa354569bc501d4e787133afc08552722d3ab34836a80547331bb5d4a0", size = 173973, upload-time = "2025-09-25T21:32:12.492Z" }, + { url = "https://files.pythonhosted.org/packages/ed/23/7a778b6bd0b9a8039df8b1b1d80e2e2ad78aa04171592c8a5c43a56a6af4/pyyaml-6.0.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9149cad251584d5fb4981be1ecde53a1ca46c891a79788c0df828d2f166bda28", size = 775116, upload-time = "2025-09-25T21:32:13.652Z" }, + { url = "https://files.pythonhosted.org/packages/65/30/d7353c338e12baef4ecc1b09e877c1970bd3382789c159b4f89d6a70dc09/pyyaml-6.0.3-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:5fdec68f91a0c6739b380c83b951e2c72ac0197ace422360e6d5a959d8d97b2c", size = 844011, upload-time = 
"2025-09-25T21:32:15.21Z" }, + { url = "https://files.pythonhosted.org/packages/8b/9d/b3589d3877982d4f2329302ef98a8026e7f4443c765c46cfecc8858c6b4b/pyyaml-6.0.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ba1cc08a7ccde2d2ec775841541641e4548226580ab850948cbfda66a1befcdc", size = 807870, upload-time = "2025-09-25T21:32:16.431Z" }, + { url = "https://files.pythonhosted.org/packages/05/c0/b3be26a015601b822b97d9149ff8cb5ead58c66f981e04fedf4e762f4bd4/pyyaml-6.0.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:8dc52c23056b9ddd46818a57b78404882310fb473d63f17b07d5c40421e47f8e", size = 761089, upload-time = "2025-09-25T21:32:17.56Z" }, + { url = "https://files.pythonhosted.org/packages/be/8e/98435a21d1d4b46590d5459a22d88128103f8da4c2d4cb8f14f2a96504e1/pyyaml-6.0.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:41715c910c881bc081f1e8872880d3c650acf13dfa8214bad49ed4cede7c34ea", size = 790181, upload-time = "2025-09-25T21:32:18.834Z" }, + { url = "https://files.pythonhosted.org/packages/74/93/7baea19427dcfbe1e5a372d81473250b379f04b1bd3c4c5ff825e2327202/pyyaml-6.0.3-cp312-cp312-win32.whl", hash = "sha256:96b533f0e99f6579b3d4d4995707cf36df9100d67e0c8303a0c55b27b5f99bc5", size = 137658, upload-time = "2025-09-25T21:32:20.209Z" }, + { url = "https://files.pythonhosted.org/packages/86/bf/899e81e4cce32febab4fb42bb97dcdf66bc135272882d1987881a4b519e9/pyyaml-6.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:5fcd34e47f6e0b794d17de1b4ff496c00986e1c83f7ab2fb8fcfe9616ff7477b", size = 154003, upload-time = "2025-09-25T21:32:21.167Z" }, + { url = "https://files.pythonhosted.org/packages/1a/08/67bd04656199bbb51dbed1439b7f27601dfb576fb864099c7ef0c3e55531/pyyaml-6.0.3-cp312-cp312-win_arm64.whl", hash = "sha256:64386e5e707d03a7e172c0701abfb7e10f0fb753ee1d773128192742712a98fd", size = 140344, upload-time = "2025-09-25T21:32:22.617Z" }, + { url = 
"https://files.pythonhosted.org/packages/d1/11/0fd08f8192109f7169db964b5707a2f1e8b745d4e239b784a5a1dd80d1db/pyyaml-6.0.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:8da9669d359f02c0b91ccc01cac4a67f16afec0dac22c2ad09f46bee0697eba8", size = 181669, upload-time = "2025-09-25T21:32:23.673Z" }, + { url = "https://files.pythonhosted.org/packages/b1/16/95309993f1d3748cd644e02e38b75d50cbc0d9561d21f390a76242ce073f/pyyaml-6.0.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:2283a07e2c21a2aa78d9c4442724ec1eb15f5e42a723b99cb3d822d48f5f7ad1", size = 173252, upload-time = "2025-09-25T21:32:25.149Z" }, + { url = "https://files.pythonhosted.org/packages/50/31/b20f376d3f810b9b2371e72ef5adb33879b25edb7a6d072cb7ca0c486398/pyyaml-6.0.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ee2922902c45ae8ccada2c5b501ab86c36525b883eff4255313a253a3160861c", size = 767081, upload-time = "2025-09-25T21:32:26.575Z" }, + { url = "https://files.pythonhosted.org/packages/49/1e/a55ca81e949270d5d4432fbbd19dfea5321eda7c41a849d443dc92fd1ff7/pyyaml-6.0.3-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:a33284e20b78bd4a18c8c2282d549d10bc8408a2a7ff57653c0cf0b9be0afce5", size = 841159, upload-time = "2025-09-25T21:32:27.727Z" }, + { url = "https://files.pythonhosted.org/packages/74/27/e5b8f34d02d9995b80abcef563ea1f8b56d20134d8f4e5e81733b1feceb2/pyyaml-6.0.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0f29edc409a6392443abf94b9cf89ce99889a1dd5376d94316ae5145dfedd5d6", size = 801626, upload-time = "2025-09-25T21:32:28.878Z" }, + { url = "https://files.pythonhosted.org/packages/f9/11/ba845c23988798f40e52ba45f34849aa8a1f2d4af4b798588010792ebad6/pyyaml-6.0.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:f7057c9a337546edc7973c0d3ba84ddcdf0daa14533c2065749c9075001090e6", size = 753613, upload-time = "2025-09-25T21:32:30.178Z" }, + { url = 
"https://files.pythonhosted.org/packages/3d/e0/7966e1a7bfc0a45bf0a7fb6b98ea03fc9b8d84fa7f2229e9659680b69ee3/pyyaml-6.0.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:eda16858a3cab07b80edaf74336ece1f986ba330fdb8ee0d6c0d68fe82bc96be", size = 794115, upload-time = "2025-09-25T21:32:31.353Z" }, + { url = "https://files.pythonhosted.org/packages/de/94/980b50a6531b3019e45ddeada0626d45fa85cbe22300844a7983285bed3b/pyyaml-6.0.3-cp313-cp313-win32.whl", hash = "sha256:d0eae10f8159e8fdad514efdc92d74fd8d682c933a6dd088030f3834bc8e6b26", size = 137427, upload-time = "2025-09-25T21:32:32.58Z" }, + { url = "https://files.pythonhosted.org/packages/97/c9/39d5b874e8b28845e4ec2202b5da735d0199dbe5b8fb85f91398814a9a46/pyyaml-6.0.3-cp313-cp313-win_amd64.whl", hash = "sha256:79005a0d97d5ddabfeeea4cf676af11e647e41d81c9a7722a193022accdb6b7c", size = 154090, upload-time = "2025-09-25T21:32:33.659Z" }, + { url = "https://files.pythonhosted.org/packages/73/e8/2bdf3ca2090f68bb3d75b44da7bbc71843b19c9f2b9cb9b0f4ab7a5a4329/pyyaml-6.0.3-cp313-cp313-win_arm64.whl", hash = "sha256:5498cd1645aa724a7c71c8f378eb29ebe23da2fc0d7a08071d89469bf1d2defb", size = 140246, upload-time = "2025-09-25T21:32:34.663Z" }, + { url = "https://files.pythonhosted.org/packages/9d/8c/f4bd7f6465179953d3ac9bc44ac1a8a3e6122cf8ada906b4f96c60172d43/pyyaml-6.0.3-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:8d1fab6bb153a416f9aeb4b8763bc0f22a5586065f86f7664fc23339fc1c1fac", size = 181814, upload-time = "2025-09-25T21:32:35.712Z" }, + { url = "https://files.pythonhosted.org/packages/bd/9c/4d95bb87eb2063d20db7b60faa3840c1b18025517ae857371c4dd55a6b3a/pyyaml-6.0.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:34d5fcd24b8445fadc33f9cf348c1047101756fd760b4dacb5c3e99755703310", size = 173809, upload-time = "2025-09-25T21:32:36.789Z" }, + { url = 
"https://files.pythonhosted.org/packages/92/b5/47e807c2623074914e29dabd16cbbdd4bf5e9b2db9f8090fa64411fc5382/pyyaml-6.0.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:501a031947e3a9025ed4405a168e6ef5ae3126c59f90ce0cd6f2bfc477be31b7", size = 766454, upload-time = "2025-09-25T21:32:37.966Z" }, + { url = "https://files.pythonhosted.org/packages/02/9e/e5e9b168be58564121efb3de6859c452fccde0ab093d8438905899a3a483/pyyaml-6.0.3-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:b3bc83488de33889877a0f2543ade9f70c67d66d9ebb4ac959502e12de895788", size = 836355, upload-time = "2025-09-25T21:32:39.178Z" }, + { url = "https://files.pythonhosted.org/packages/88/f9/16491d7ed2a919954993e48aa941b200f38040928474c9e85ea9e64222c3/pyyaml-6.0.3-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c458b6d084f9b935061bc36216e8a69a7e293a2f1e68bf956dcd9e6cbcd143f5", size = 794175, upload-time = "2025-09-25T21:32:40.865Z" }, + { url = "https://files.pythonhosted.org/packages/dd/3f/5989debef34dc6397317802b527dbbafb2b4760878a53d4166579111411e/pyyaml-6.0.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:7c6610def4f163542a622a73fb39f534f8c101d690126992300bf3207eab9764", size = 755228, upload-time = "2025-09-25T21:32:42.084Z" }, + { url = "https://files.pythonhosted.org/packages/d7/ce/af88a49043cd2e265be63d083fc75b27b6ed062f5f9fd6cdc223ad62f03e/pyyaml-6.0.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:5190d403f121660ce8d1d2c1bb2ef1bd05b5f68533fc5c2ea899bd15f4399b35", size = 789194, upload-time = "2025-09-25T21:32:43.362Z" }, + { url = "https://files.pythonhosted.org/packages/23/20/bb6982b26a40bb43951265ba29d4c246ef0ff59c9fdcdf0ed04e0687de4d/pyyaml-6.0.3-cp314-cp314-win_amd64.whl", hash = "sha256:4a2e8cebe2ff6ab7d1050ecd59c25d4c8bd7e6f400f5f82b96557ac0abafd0ac", size = 156429, upload-time = "2025-09-25T21:32:57.844Z" }, + { url = 
"https://files.pythonhosted.org/packages/f4/f4/a4541072bb9422c8a883ab55255f918fa378ecf083f5b85e87fc2b4eda1b/pyyaml-6.0.3-cp314-cp314-win_arm64.whl", hash = "sha256:93dda82c9c22deb0a405ea4dc5f2d0cda384168e466364dec6255b293923b2f3", size = 143912, upload-time = "2025-09-25T21:32:59.247Z" }, + { url = "https://files.pythonhosted.org/packages/7c/f9/07dd09ae774e4616edf6cda684ee78f97777bdd15847253637a6f052a62f/pyyaml-6.0.3-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:02893d100e99e03eda1c8fd5c441d8c60103fd175728e23e431db1b589cf5ab3", size = 189108, upload-time = "2025-09-25T21:32:44.377Z" }, + { url = "https://files.pythonhosted.org/packages/4e/78/8d08c9fb7ce09ad8c38ad533c1191cf27f7ae1effe5bb9400a46d9437fcf/pyyaml-6.0.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:c1ff362665ae507275af2853520967820d9124984e0f7466736aea23d8611fba", size = 183641, upload-time = "2025-09-25T21:32:45.407Z" }, + { url = "https://files.pythonhosted.org/packages/7b/5b/3babb19104a46945cf816d047db2788bcaf8c94527a805610b0289a01c6b/pyyaml-6.0.3-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6adc77889b628398debc7b65c073bcb99c4a0237b248cacaf3fe8a557563ef6c", size = 831901, upload-time = "2025-09-25T21:32:48.83Z" }, + { url = "https://files.pythonhosted.org/packages/8b/cc/dff0684d8dc44da4d22a13f35f073d558c268780ce3c6ba1b87055bb0b87/pyyaml-6.0.3-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:a80cb027f6b349846a3bf6d73b5e95e782175e52f22108cfa17876aaeff93702", size = 861132, upload-time = "2025-09-25T21:32:50.149Z" }, + { url = "https://files.pythonhosted.org/packages/b1/5e/f77dc6b9036943e285ba76b49e118d9ea929885becb0a29ba8a7c75e29fe/pyyaml-6.0.3-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:00c4bdeba853cc34e7dd471f16b4114f4162dc03e6b7afcc2128711f0eca823c", size = 839261, upload-time = "2025-09-25T21:32:51.808Z" }, + { url = 
"https://files.pythonhosted.org/packages/ce/88/a9db1376aa2a228197c58b37302f284b5617f56a5d959fd1763fb1675ce6/pyyaml-6.0.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:66e1674c3ef6f541c35191caae2d429b967b99e02040f5ba928632d9a7f0f065", size = 805272, upload-time = "2025-09-25T21:32:52.941Z" }, + { url = "https://files.pythonhosted.org/packages/da/92/1446574745d74df0c92e6aa4a7b0b3130706a4142b2d1a5869f2eaa423c6/pyyaml-6.0.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:16249ee61e95f858e83976573de0f5b2893b3677ba71c9dd36b9cf8be9ac6d65", size = 829923, upload-time = "2025-09-25T21:32:54.537Z" }, + { url = "https://files.pythonhosted.org/packages/f0/7a/1c7270340330e575b92f397352af856a8c06f230aa3e76f86b39d01b416a/pyyaml-6.0.3-cp314-cp314t-win_amd64.whl", hash = "sha256:4ad1906908f2f5ae4e5a8ddfce73c320c2a1429ec52eafd27138b7f1cbe341c9", size = 174062, upload-time = "2025-09-25T21:32:55.767Z" }, + { url = "https://files.pythonhosted.org/packages/f1/12/de94a39c2ef588c7e6455cfbe7343d3b2dc9d6b6b2f40c4c6565744c873d/pyyaml-6.0.3-cp314-cp314t-win_arm64.whl", hash = "sha256:ebc55a14a21cb14062aa4162f906cd962b28e2e9ea38f9b4391244cd8de4ae0b", size = 149341, upload-time = "2025-09-25T21:32:56.828Z" }, +] + +[[package]] +name = "regex" +version = "2026.2.28" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/8b/71/41455aa99a5a5ac1eaf311f5d8efd9ce6433c03ac1e0962de163350d0d97/regex-2026.2.28.tar.gz", hash = "sha256:a729e47d418ea11d03469f321aaf67cdee8954cde3ff2cf8403ab87951ad10f2", size = 415184, upload-time = "2026-02-28T02:19:42.792Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/07/42/9061b03cf0fc4b5fa2c3984cbbaed54324377e440a5c5a29d29a72518d62/regex-2026.2.28-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:fcf26c3c6d0da98fada8ae4ef0aa1c3405a431c0a77eb17306d38a89b02adcd7", size = 489574, upload-time = "2026-02-28T02:16:50.455Z" }, + { url = 
"https://files.pythonhosted.org/packages/77/83/0c8a5623a233015595e3da499c5a1c13720ac63c107897a6037bb97af248/regex-2026.2.28-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:02473c954af35dd2defeb07e44182f5705b30ea3f351a7cbffa9177beb14da5d", size = 291426, upload-time = "2026-02-28T02:16:52.52Z" }, + { url = "https://files.pythonhosted.org/packages/9e/06/3ef1ac6910dc3295ebd71b1f9bfa737e82cfead211a18b319d45f85ddd09/regex-2026.2.28-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:9b65d33a17101569f86d9c5966a8b1d7fbf8afdda5a8aa219301b0a80f58cf7d", size = 289200, upload-time = "2026-02-28T02:16:54.08Z" }, + { url = "https://files.pythonhosted.org/packages/dd/c9/8cc8d850b35ab5650ff6756a1cb85286e2000b66c97520b29c1587455344/regex-2026.2.28-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e71dcecaa113eebcc96622c17692672c2d104b1d71ddf7adeda90da7ddeb26fc", size = 796765, upload-time = "2026-02-28T02:16:55.905Z" }, + { url = "https://files.pythonhosted.org/packages/e9/5d/57702597627fc23278ebf36fbb497ac91c0ce7fec89ac6c81e420ca3e38c/regex-2026.2.28-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:481df4623fa4969c8b11f3433ed7d5e3dc9cec0f008356c3212b3933fb77e3d8", size = 863093, upload-time = "2026-02-28T02:16:58.094Z" }, + { url = "https://files.pythonhosted.org/packages/02/6d/f3ecad537ca2811b4d26b54ca848cf70e04fcfc138667c146a9f3157779c/regex-2026.2.28-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:64e7c6ad614573e0640f271e811a408d79a9e1fe62a46adb602f598df42a818d", size = 909455, upload-time = "2026-02-28T02:17:00.918Z" }, + { url = "https://files.pythonhosted.org/packages/9e/40/bb226f203caa22c1043c1ca79b36340156eca0f6a6742b46c3bb222a3a57/regex-2026.2.28-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d6b08a06976ff4fb0d83077022fde3eca06c55432bb997d8c0495b9a4e9872f4", size = 
802037, upload-time = "2026-02-28T02:17:02.842Z" }, + { url = "https://files.pythonhosted.org/packages/44/7c/c6d91d8911ac6803b45ca968e8e500c46934e58c0903cbc6d760ee817a0a/regex-2026.2.28-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:864cdd1a2ef5716b0ab468af40139e62ede1b3a53386b375ec0786bb6783fc05", size = 775113, upload-time = "2026-02-28T02:17:04.506Z" }, + { url = "https://files.pythonhosted.org/packages/dc/8d/4a9368d168d47abd4158580b8c848709667b1cd293ff0c0c277279543bd0/regex-2026.2.28-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:511f7419f7afab475fd4d639d4aedfc54205bcb0800066753ef68a59f0f330b5", size = 784194, upload-time = "2026-02-28T02:17:06.888Z" }, + { url = "https://files.pythonhosted.org/packages/cc/bf/2c72ab5d8b7be462cb1651b5cc333da1d0068740342f350fcca3bca31947/regex-2026.2.28-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:b42f7466e32bf15a961cf09f35fa6323cc72e64d3d2c990b10de1274a5da0a59", size = 856846, upload-time = "2026-02-28T02:17:09.11Z" }, + { url = "https://files.pythonhosted.org/packages/7c/f4/6b65c979bb6d09f51bb2d2a7bc85de73c01ec73335d7ddd202dcb8cd1c8f/regex-2026.2.28-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:8710d61737b0c0ce6836b1da7109f20d495e49b3809f30e27e9560be67a257bf", size = 763516, upload-time = "2026-02-28T02:17:11.004Z" }, + { url = "https://files.pythonhosted.org/packages/8e/32/29ea5e27400ee86d2cc2b4e80aa059df04eaf78b4f0c18576ae077aeff68/regex-2026.2.28-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:4390c365fd2d45278f45afd4673cb90f7285f5701607e3ad4274df08e36140ae", size = 849278, upload-time = "2026-02-28T02:17:12.693Z" }, + { url = "https://files.pythonhosted.org/packages/1d/91/3233d03b5f865111cd517e1c95ee8b43e8b428d61fa73764a80c9bb6f537/regex-2026.2.28-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:cb3b1db8ff6c7b8bf838ab05583ea15230cb2f678e569ab0e3a24d1e8320940b", size = 790068, upload-time = "2026-02-28T02:17:14.9Z" }, + { url = 
"https://files.pythonhosted.org/packages/76/92/abc706c1fb03b4580a09645b206a3fc032f5a9f457bc1a8038ac555658ab/regex-2026.2.28-cp312-cp312-win32.whl", hash = "sha256:f8ed9a5d4612df9d4de15878f0bc6aa7a268afbe5af21a3fdd97fa19516e978c", size = 266416, upload-time = "2026-02-28T02:17:17.15Z" }, + { url = "https://files.pythonhosted.org/packages/fa/06/2a6f7dff190e5fa9df9fb4acf2fdf17a1aa0f7f54596cba8de608db56b3a/regex-2026.2.28-cp312-cp312-win_amd64.whl", hash = "sha256:01d65fd24206c8e1e97e2e31b286c59009636c022eb5d003f52760b0f42155d4", size = 277297, upload-time = "2026-02-28T02:17:18.723Z" }, + { url = "https://files.pythonhosted.org/packages/b7/f0/58a2484851fadf284458fdbd728f580d55c1abac059ae9f048c63b92f427/regex-2026.2.28-cp312-cp312-win_arm64.whl", hash = "sha256:c0b5ccbb8ffb433939d248707d4a8b31993cb76ab1a0187ca886bf50e96df952", size = 270408, upload-time = "2026-02-28T02:17:20.328Z" }, + { url = "https://files.pythonhosted.org/packages/87/f6/dc9ef48c61b79c8201585bf37fa70cd781977da86e466cd94e8e95d2443b/regex-2026.2.28-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:6d63a07e5ec8ce7184452cb00c41c37b49e67dc4f73b2955b5b8e782ea970784", size = 489311, upload-time = "2026-02-28T02:17:22.591Z" }, + { url = "https://files.pythonhosted.org/packages/95/c8/c20390f2232d3f7956f420f4ef1852608ad57aa26c3dd78516cb9f3dc913/regex-2026.2.28-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:e59bc8f30414d283ae8ee1617b13d8112e7135cb92830f0ec3688cb29152585a", size = 291285, upload-time = "2026-02-28T02:17:24.355Z" }, + { url = "https://files.pythonhosted.org/packages/d2/a6/ba1068a631ebd71a230e7d8013fcd284b7c89c35f46f34a7da02082141b1/regex-2026.2.28-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:de0cf053139f96219ccfabb4a8dd2d217c8c82cb206c91d9f109f3f552d6b43d", size = 289051, upload-time = "2026-02-28T02:17:26.722Z" }, + { url = 
"https://files.pythonhosted.org/packages/1d/1b/7cc3b7af4c244c204b7a80924bd3d85aecd9ba5bc82b485c5806ee8cda9e/regex-2026.2.28-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:fb4db2f17e6484904f986c5a657cec85574c76b5c5e61c7aae9ffa1bc6224f95", size = 796842, upload-time = "2026-02-28T02:17:29.064Z" }, + { url = "https://files.pythonhosted.org/packages/24/87/26bd03efc60e0d772ac1e7b60a2e6325af98d974e2358f659c507d3c76db/regex-2026.2.28-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:52b017b35ac2214d0db5f4f90e303634dc44e4aba4bd6235a27f97ecbe5b0472", size = 863083, upload-time = "2026-02-28T02:17:31.363Z" }, + { url = "https://files.pythonhosted.org/packages/ae/54/aeaf4afb1aa0a65e40de52a61dc2ac5b00a83c6cb081c8a1d0dda74f3010/regex-2026.2.28-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:69fc560ccbf08a09dc9b52ab69cacfae51e0ed80dc5693078bdc97db2f91ae96", size = 909412, upload-time = "2026-02-28T02:17:33.248Z" }, + { url = "https://files.pythonhosted.org/packages/12/2f/049901def913954e640d199bbc6a7ca2902b6aeda0e5da9d17f114100ec2/regex-2026.2.28-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e61eea47230eba62a31f3e8a0e3164d0f37ef9f40529fb2c79361bc6b53d2a92", size = 802101, upload-time = "2026-02-28T02:17:35.053Z" }, + { url = "https://files.pythonhosted.org/packages/7d/a5/512fb9ff7f5b15ea204bb1967ebb649059446decacccb201381f9fa6aad4/regex-2026.2.28-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:4f5c0b182ad4269e7381b7c27fdb0408399881f7a92a4624fd5487f2971dfc11", size = 775260, upload-time = "2026-02-28T02:17:37.692Z" }, + { url = "https://files.pythonhosted.org/packages/d1/a8/9a92935878aba19bd72706b9db5646a6f993d99b3f6ed42c02ec8beb1d61/regex-2026.2.28-cp313-cp313-musllinux_1_2_aarch64.whl", hash = 
"sha256:96f6269a2882fbb0ee76967116b83679dc628e68eaea44e90884b8d53d833881", size = 784311, upload-time = "2026-02-28T02:17:39.855Z" }, + { url = "https://files.pythonhosted.org/packages/09/d3/fc51a8a738a49a6b6499626580554c9466d3ea561f2b72cfdc72e4149773/regex-2026.2.28-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:b5acd4b6a95f37c3c3828e5d053a7d4edaedb85de551db0153754924cb7c83e3", size = 856876, upload-time = "2026-02-28T02:17:42.317Z" }, + { url = "https://files.pythonhosted.org/packages/08/b7/2e641f3d084b120ca4c52e8c762a78da0b32bf03ef546330db3e2635dc5f/regex-2026.2.28-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:2234059cfe33d9813a3677ef7667999caea9eeaa83fef98eb6ce15c6cf9e0215", size = 763632, upload-time = "2026-02-28T02:17:45.073Z" }, + { url = "https://files.pythonhosted.org/packages/fe/6d/0009021d97e79ee99f3d8641f0a8d001eed23479ade4c3125a5480bf3e2d/regex-2026.2.28-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:c15af43c72a7fb0c97cbc66fa36a43546eddc5c06a662b64a0cbf30d6ac40944", size = 849320, upload-time = "2026-02-28T02:17:47.192Z" }, + { url = "https://files.pythonhosted.org/packages/05/7a/51cfbad5758f8edae430cb21961a9c8d04bce1dae4d2d18d4186eec7cfa1/regex-2026.2.28-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:9185cc63359862a6e80fe97f696e04b0ad9a11c4ac0a4a927f979f611bfe3768", size = 790152, upload-time = "2026-02-28T02:17:49.067Z" }, + { url = "https://files.pythonhosted.org/packages/90/3d/a83e2b6b3daa142acb8c41d51de3876186307d5cb7490087031747662500/regex-2026.2.28-cp313-cp313-win32.whl", hash = "sha256:fb66e5245db9652abd7196ace599b04d9c0e4aa7c8f0e2803938377835780081", size = 266398, upload-time = "2026-02-28T02:17:50.744Z" }, + { url = "https://files.pythonhosted.org/packages/85/4f/16e9ebb1fe5425e11b9596c8d57bf8877dcb32391da0bfd33742e3290637/regex-2026.2.28-cp313-cp313-win_amd64.whl", hash = "sha256:71a911098be38c859ceb3f9a9ce43f4ed9f4c6720ad8684a066ea246b76ad9ff", size = 277282, upload-time = "2026-02-28T02:17:53.074Z" }, + { 
url = "https://files.pythonhosted.org/packages/07/b4/92851335332810c5a89723bf7a7e35c7209f90b7d4160024501717b28cc9/regex-2026.2.28-cp313-cp313-win_arm64.whl", hash = "sha256:39bb5727650b9a0275c6a6690f9bb3fe693a7e6cc5c3155b1240aedf8926423e", size = 270382, upload-time = "2026-02-28T02:17:54.888Z" }, + { url = "https://files.pythonhosted.org/packages/24/07/6c7e4cec1e585959e96cbc24299d97e4437a81173217af54f1804994e911/regex-2026.2.28-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:97054c55db06ab020342cc0d35d6f62a465fa7662871190175f1ad6c655c028f", size = 492541, upload-time = "2026-02-28T02:17:56.813Z" }, + { url = "https://files.pythonhosted.org/packages/7c/13/55eb22ada7f43d4f4bb3815b6132183ebc331c81bd496e2d1f3b8d862e0d/regex-2026.2.28-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:0d25a10811de831c2baa6aef3c0be91622f44dd8d31dd12e69f6398efb15e48b", size = 292984, upload-time = "2026-02-28T02:17:58.538Z" }, + { url = "https://files.pythonhosted.org/packages/5b/11/c301f8cb29ce9644a5ef85104c59244e6e7e90994a0f458da4d39baa8e17/regex-2026.2.28-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:d6cfe798d8da41bb1862ed6e0cba14003d387c3c0c4a5d45591076ae9f0ce2f8", size = 291509, upload-time = "2026-02-28T02:18:00.208Z" }, + { url = "https://files.pythonhosted.org/packages/b5/43/aabe384ec1994b91796e903582427bc2ffaed9c4103819ed3c16d8e749f3/regex-2026.2.28-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:fd0ce43e71d825b7c0661f9c54d4d74bd97c56c3fd102a8985bcfea48236bacb", size = 809429, upload-time = "2026-02-28T02:18:02.328Z" }, + { url = "https://files.pythonhosted.org/packages/04/b8/8d2d987a816720c4f3109cee7c06a4b24ad0e02d4fc74919ab619e543737/regex-2026.2.28-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:00945d007fd74a9084d2ab79b695b595c6b7ba3698972fadd43e23230c6979c1", size = 869422, upload-time = "2026-02-28T02:18:04.23Z" }, + { url = 
"https://files.pythonhosted.org/packages/fc/ad/2c004509e763c0c3719f97c03eca26473bffb3868d54c5f280b8cd4f9e3d/regex-2026.2.28-cp313-cp313t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:bec23c11cbbf09a4df32fe50d57cbdd777bc442269b6e39a1775654f1c95dee2", size = 915175, upload-time = "2026-02-28T02:18:06.791Z" }, + { url = "https://files.pythonhosted.org/packages/55/c2/fd429066da487ef555a9da73bf214894aec77fc8c66a261ee355a69871a8/regex-2026.2.28-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5cdcc17d935c8f9d3f4db5c2ebe2640c332e3822ad5d23c2f8e0228e6947943a", size = 812044, upload-time = "2026-02-28T02:18:08.736Z" }, + { url = "https://files.pythonhosted.org/packages/5b/ca/feedb7055c62a3f7f659971bf45f0e0a87544b6b0cf462884761453f97c5/regex-2026.2.28-cp313-cp313t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:a448af01e3d8031c89c5d902040b124a5e921a25c4e5e07a861ca591ce429341", size = 782056, upload-time = "2026-02-28T02:18:10.777Z" }, + { url = "https://files.pythonhosted.org/packages/95/30/1aa959ed0d25c1dd7dd5047ea8ba482ceaef38ce363c401fd32a6b923e60/regex-2026.2.28-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:10d28e19bd4888e4abf43bd3925f3c134c52fdf7259219003588a42e24c2aa25", size = 798743, upload-time = "2026-02-28T02:18:13.025Z" }, + { url = "https://files.pythonhosted.org/packages/3b/1f/dadb9cf359004784051c897dcf4d5d79895f73a1bbb7b827abaa4814ae80/regex-2026.2.28-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:99985a2c277dcb9ccb63f937451af5d65177af1efdeb8173ac55b61095a0a05c", size = 864633, upload-time = "2026-02-28T02:18:16.84Z" }, + { url = "https://files.pythonhosted.org/packages/a7/f1/b9a25eb24e1cf79890f09e6ec971ee5b511519f1851de3453bc04f6c902b/regex-2026.2.28-cp313-cp313t-musllinux_1_2_riscv64.whl", hash = "sha256:e1e7b24cb3ae9953a560c563045d1ba56ee4749fbd05cf21ba571069bd7be81b", size = 770862, upload-time = "2026-02-28T02:18:18.892Z" }, + { url 
= "https://files.pythonhosted.org/packages/02/9a/c5cb10b7aa6f182f9247a30cc9527e326601f46f4df864ac6db588d11fcd/regex-2026.2.28-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:d8511a01d0e4ee1992eb3ba19e09bc1866fe03f05129c3aec3fdc4cbc77aad3f", size = 854788, upload-time = "2026-02-28T02:18:21.475Z" }, + { url = "https://files.pythonhosted.org/packages/0a/50/414ba0731c4bd40b011fa4703b2cc86879ec060c64f2a906e65a56452589/regex-2026.2.28-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:aaffaecffcd2479ce87aa1e74076c221700b7c804e48e98e62500ee748f0f550", size = 800184, upload-time = "2026-02-28T02:18:23.492Z" }, + { url = "https://files.pythonhosted.org/packages/69/50/0c7290987f97e7e6830b0d853f69dc4dc5852c934aae63e7fdcd76b4c383/regex-2026.2.28-cp313-cp313t-win32.whl", hash = "sha256:ef77bdde9c9eba3f7fa5b58084b29bbcc74bcf55fdbeaa67c102a35b5bd7e7cc", size = 269137, upload-time = "2026-02-28T02:18:25.375Z" }, + { url = "https://files.pythonhosted.org/packages/68/80/ef26ff90e74ceb4051ad6efcbbb8a4be965184a57e879ebcbdef327d18fa/regex-2026.2.28-cp313-cp313t-win_amd64.whl", hash = "sha256:98adf340100cbe6fbaf8e6dc75e28f2c191b1be50ffefe292fb0e6f6eefdb0d8", size = 280682, upload-time = "2026-02-28T02:18:27.205Z" }, + { url = "https://files.pythonhosted.org/packages/69/8b/fbad9c52e83ffe8f97e3ed1aa0516e6dff6bb633a41da9e64645bc7efdc5/regex-2026.2.28-cp313-cp313t-win_arm64.whl", hash = "sha256:2fb950ac1d88e6b6a9414381f403797b236f9fa17e1eee07683af72b1634207b", size = 271735, upload-time = "2026-02-28T02:18:29.015Z" }, + { url = "https://files.pythonhosted.org/packages/cf/03/691015f7a7cb1ed6dacb2ea5de5682e4858e05a4c5506b2839cd533bbcd6/regex-2026.2.28-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:78454178c7df31372ea737996fb7f36b3c2c92cccc641d251e072478afb4babc", size = 489497, upload-time = "2026-02-28T02:18:30.889Z" }, + { url = 
"https://files.pythonhosted.org/packages/c6/ba/8db8fd19afcbfa0e1036eaa70c05f20ca8405817d4ad7a38a6b4c2f031ac/regex-2026.2.28-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:5d10303dd18cedfd4d095543998404df656088240bcfd3cd20a8f95b861f74bd", size = 291295, upload-time = "2026-02-28T02:18:33.426Z" }, + { url = "https://files.pythonhosted.org/packages/5a/79/9aa0caf089e8defef9b857b52fc53801f62ff868e19e5c83d4a96612eba1/regex-2026.2.28-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:19a9c9e0a8f24f39d575a6a854d516b48ffe4cbdcb9de55cb0570a032556ecff", size = 289275, upload-time = "2026-02-28T02:18:35.247Z" }, + { url = "https://files.pythonhosted.org/packages/eb/26/ee53117066a30ef9c883bf1127eece08308ccf8ccd45c45a966e7a665385/regex-2026.2.28-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:09500be324f49b470d907b3ef8af9afe857f5cca486f853853f7945ddbf75911", size = 797176, upload-time = "2026-02-28T02:18:37.15Z" }, + { url = "https://files.pythonhosted.org/packages/05/1b/67fb0495a97259925f343ae78b5d24d4a6624356ae138b57f18bd43006e4/regex-2026.2.28-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:fb1c4ff62277d87a7335f2c1ea4e0387b8f2b3ad88a64efd9943906aafad4f33", size = 863813, upload-time = "2026-02-28T02:18:39.478Z" }, + { url = "https://files.pythonhosted.org/packages/a0/1d/93ac9bbafc53618091c685c7ed40239a90bf9f2a82c983f0baa97cb7ae07/regex-2026.2.28-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:b8b3f1be1738feadc69f62daa250c933e85c6f34fa378f54a7ff43807c1b9117", size = 908678, upload-time = "2026-02-28T02:18:41.619Z" }, + { url = "https://files.pythonhosted.org/packages/c7/7a/a8f5e0561702b25239846a16349feece59712ae20598ebb205580332a471/regex-2026.2.28-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:dc8ed8c3f41c27acb83f7b6a9eb727a73fc6663441890c5cb3426a5f6a91ce7d", size = 
801528, upload-time = "2026-02-28T02:18:43.624Z" }, + { url = "https://files.pythonhosted.org/packages/96/5d/ed6d4cbde80309854b1b9f42d9062fee38ade15f7eb4909f6ef2440403b5/regex-2026.2.28-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:fa539be029844c0ce1114762d2952ab6cfdd7c7c9bd72e0db26b94c3c36dcc5a", size = 775373, upload-time = "2026-02-28T02:18:46.102Z" }, + { url = "https://files.pythonhosted.org/packages/6a/e9/6e53c34e8068b9deec3e87210086ecb5b9efebdefca6b0d3fa43d66dcecb/regex-2026.2.28-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:7900157786428a79615a8264dac1f12c9b02957c473c8110c6b1f972dcecaddf", size = 784859, upload-time = "2026-02-28T02:18:48.269Z" }, + { url = "https://files.pythonhosted.org/packages/48/3c/736e1c7ca7f0dcd2ae33819888fdc69058a349b7e5e84bc3e2f296bbf794/regex-2026.2.28-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:0b1d2b07614d95fa2bf8a63fd1e98bd8fa2b4848dc91b1efbc8ba219fdd73952", size = 857813, upload-time = "2026-02-28T02:18:50.576Z" }, + { url = "https://files.pythonhosted.org/packages/6e/7c/48c4659ad9da61f58e79dbe8c05223e0006696b603c16eb6b5cbfbb52c27/regex-2026.2.28-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:b389c61aa28a79c2e0527ac36da579869c2e235a5b208a12c5b5318cda2501d8", size = 763705, upload-time = "2026-02-28T02:18:52.59Z" }, + { url = "https://files.pythonhosted.org/packages/cf/a1/bc1c261789283128165f71b71b4b221dd1b79c77023752a6074c102f18d8/regex-2026.2.28-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:f467cb602f03fbd1ab1908f68b53c649ce393fde056628dc8c7e634dab6bfc07", size = 848734, upload-time = "2026-02-28T02:18:54.595Z" }, + { url = "https://files.pythonhosted.org/packages/10/d8/979407faf1397036e25a5ae778157366a911c0f382c62501009f4957cf86/regex-2026.2.28-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:e8c8cb2deba42f5ec1ede46374e990f8adc5e6456a57ac1a261b19be6f28e4e6", size = 789871, upload-time = "2026-02-28T02:18:57.34Z" }, + { url = 
"https://files.pythonhosted.org/packages/03/23/da716821277115fcb1f4e3de1e5dc5023a1e6533598c486abf5448612579/regex-2026.2.28-cp314-cp314-win32.whl", hash = "sha256:9036b400b20e4858d56d117108d7813ed07bb7803e3eed766675862131135ca6", size = 271825, upload-time = "2026-02-28T02:18:59.202Z" }, + { url = "https://files.pythonhosted.org/packages/91/ff/90696f535d978d5f16a52a419be2770a8d8a0e7e0cfecdbfc31313df7fab/regex-2026.2.28-cp314-cp314-win_amd64.whl", hash = "sha256:1d367257cd86c1cbb97ea94e77b373a0bbc2224976e247f173d19e8f18b4afa7", size = 280548, upload-time = "2026-02-28T02:19:01.049Z" }, + { url = "https://files.pythonhosted.org/packages/69/f9/5e1b5652fc0af3fcdf7677e7df3ad2a0d47d669b34ac29a63bb177bb731b/regex-2026.2.28-cp314-cp314-win_arm64.whl", hash = "sha256:5e68192bb3a1d6fb2836da24aa494e413ea65853a21505e142e5b1064a595f3d", size = 273444, upload-time = "2026-02-28T02:19:03.255Z" }, + { url = "https://files.pythonhosted.org/packages/d3/eb/8389f9e940ac89bcf58d185e230a677b4fd07c5f9b917603ad5c0f8fa8fe/regex-2026.2.28-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:a5dac14d0872eeb35260a8e30bac07ddf22adc1e3a0635b52b02e180d17c9c7e", size = 492546, upload-time = "2026-02-28T02:19:05.378Z" }, + { url = "https://files.pythonhosted.org/packages/7b/c7/09441d27ce2a6fa6a61ea3150ea4639c1dcda9b31b2ea07b80d6937b24dd/regex-2026.2.28-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:ec0c608b7a7465ffadb344ed7c987ff2f11ee03f6a130b569aa74d8a70e8333c", size = 292986, upload-time = "2026-02-28T02:19:07.24Z" }, + { url = "https://files.pythonhosted.org/packages/fb/69/4144b60ed7760a6bd235e4087041f487aa4aa62b45618ce018b0c14833ea/regex-2026.2.28-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:c7815afb0ca45456613fdaf60ea9c993715511c8d53a83bc468305cbc0ee23c7", size = 291518, upload-time = "2026-02-28T02:19:09.698Z" }, + { url = 
"https://files.pythonhosted.org/packages/2d/be/77e5426cf5948c82f98c53582009ca9e94938c71f73a8918474f2e2990bb/regex-2026.2.28-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b059e71ec363968671693a78c5053bd9cb2fe410f9b8e4657e88377ebd603a2e", size = 809464, upload-time = "2026-02-28T02:19:12.494Z" }, + { url = "https://files.pythonhosted.org/packages/45/99/2c8c5ac90dc7d05c6e7d8e72c6a3599dc08cd577ac476898e91ca787d7f1/regex-2026.2.28-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:b8cf76f1a29f0e99dcfd7aef1551a9827588aae5a737fe31442021165f1920dc", size = 869553, upload-time = "2026-02-28T02:19:15.151Z" }, + { url = "https://files.pythonhosted.org/packages/53/34/daa66a342f0271e7737003abf6c3097aa0498d58c668dbd88362ef94eb5d/regex-2026.2.28-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:180e08a435a0319e6a4821c3468da18dc7001987e1c17ae1335488dfe7518dd8", size = 915289, upload-time = "2026-02-28T02:19:17.331Z" }, + { url = "https://files.pythonhosted.org/packages/c5/c7/e22c2aaf0a12e7e22ab19b004bb78d32ca1ecc7ef245949935463c5567de/regex-2026.2.28-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1e496956106fd59ba6322a8ea17141a27c5040e5ee8f9433ae92d4e5204462a0", size = 812156, upload-time = "2026-02-28T02:19:20.011Z" }, + { url = "https://files.pythonhosted.org/packages/7f/bb/2dc18c1efd9051cf389cd0d7a3a4d90f6804b9fff3a51b5dc3c85b935f71/regex-2026.2.28-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:bba2b18d70eeb7b79950f12f633beeecd923f7c9ad6f6bae28e59b4cb3ab046b", size = 782215, upload-time = "2026-02-28T02:19:22.047Z" }, + { url = "https://files.pythonhosted.org/packages/17/1e/9e4ec9b9013931faa32226ec4aa3c71fe664a6d8a2b91ac56442128b332f/regex-2026.2.28-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = 
"sha256:6db7bfae0f8a2793ff1f7021468ea55e2699d0790eb58ee6ab36ae43aa00bc5b", size = 798925, upload-time = "2026-02-28T02:19:24.173Z" }, + { url = "https://files.pythonhosted.org/packages/71/57/a505927e449a9ccb41e2cc8d735e2abe3444b0213d1cf9cb364a8c1f2524/regex-2026.2.28-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:d0b02e8b7e5874b48ae0f077ecca61c1a6a9f9895e9c6dfb191b55b242862033", size = 864701, upload-time = "2026-02-28T02:19:26.376Z" }, + { url = "https://files.pythonhosted.org/packages/a6/ad/c62cb60cdd93e13eac5b3d9d6bd5d284225ed0e3329426f94d2552dd7cca/regex-2026.2.28-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:25b6eb660c5cf4b8c3407a1ed462abba26a926cc9965e164268a3267bcc06a43", size = 770899, upload-time = "2026-02-28T02:19:29.38Z" }, + { url = "https://files.pythonhosted.org/packages/3c/5a/874f861f5c3d5ab99633e8030dee1bc113db8e0be299d1f4b07f5b5ec349/regex-2026.2.28-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:5a932ea8ad5d0430351ff9c76c8db34db0d9f53c1d78f06022a21f4e290c5c18", size = 854727, upload-time = "2026-02-28T02:19:31.494Z" }, + { url = "https://files.pythonhosted.org/packages/6b/ca/d2c03b0efde47e13db895b975b2be6a73ed90b8ba963677927283d43bf74/regex-2026.2.28-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:1c2c95e1a2b0f89d01e821ff4de1be4b5d73d1f4b0bf679fa27c1ad8d2327f1a", size = 800366, upload-time = "2026-02-28T02:19:34.248Z" }, + { url = "https://files.pythonhosted.org/packages/14/bd/ee13b20b763b8989f7c75d592bfd5de37dc1181814a2a2747fedcf97e3ba/regex-2026.2.28-cp314-cp314t-win32.whl", hash = "sha256:bbb882061f742eb5d46f2f1bd5304055be0a66b783576de3d7eef1bed4778a6e", size = 274936, upload-time = "2026-02-28T02:19:36.313Z" }, + { url = "https://files.pythonhosted.org/packages/cb/e7/d8020e39414c93af7f0d8688eabcecece44abfd5ce314b21dfda0eebd3d8/regex-2026.2.28-cp314-cp314t-win_amd64.whl", hash = "sha256:6591f281cb44dc13de9585b552cec6fc6cf47fb2fe7a48892295ee9bc4a612f9", size = 284779, upload-time = "2026-02-28T02:19:38.625Z" }, + 
{ url = "https://files.pythonhosted.org/packages/13/c0/ad225f4a405827486f1955283407cf758b6d2fb966712644c5f5aef33d1b/regex-2026.2.28-cp314-cp314t-win_arm64.whl", hash = "sha256:dee50f1be42222f89767b64b283283ef963189da0dda4a515aa54a5563c62dec", size = 275010, upload-time = "2026-02-28T02:19:40.65Z" }, +] + +[[package]] +name = "requests" +version = "2.32.5" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "certifi" }, + { name = "charset-normalizer" }, + { name = "idna" }, + { name = "urllib3" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/c9/74/b3ff8e6c8446842c3f5c837e9c3dfcfe2018ea6ecef224c710c85ef728f4/requests-2.32.5.tar.gz", hash = "sha256:dbba0bac56e100853db0ea71b82b4dfd5fe2bf6d3754a8893c3af500cec7d7cf", size = 134517, upload-time = "2025-08-18T20:46:02.573Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/1e/db/4254e3eabe8020b458f1a747140d32277ec7a271daf1d235b70dc0b4e6e3/requests-2.32.5-py3-none-any.whl", hash = "sha256:2462f94637a34fd532264295e186976db0f5d453d1cdd31473c85a6a161affb6", size = 64738, upload-time = "2025-08-18T20:46:00.542Z" }, +] + +[[package]] +name = "rich" +version = "14.3.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "markdown-it-py" }, + { name = "pygments" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/b3/c6/f3b320c27991c46f43ee9d856302c70dc2d0fb2dba4842ff739d5f46b393/rich-14.3.3.tar.gz", hash = "sha256:b8daa0b9e4eef54dd8cf7c86c03713f53241884e814f4e2f5fb342fe520f639b", size = 230582, upload-time = "2026-02-19T17:23:12.474Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/14/25/b208c5683343959b670dc001595f2f3737e051da617f66c31f7c4fa93abc/rich-14.3.3-py3-none-any.whl", hash = "sha256:793431c1f8619afa7d3b52b2cdec859562b950ea0d4b6b505397612db8d5362d", size = 310458, upload-time = "2026-02-19T17:23:13.732Z" }, +] + +[[package]] +name = "rlp" +version = "4.1.0" +source = { registry = "https://pypi.org/simple" } 
+dependencies = [ + { name = "eth-utils" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/1b/2d/439b0728a92964a04d9c88ea1ca9ebb128893fbbd5834faa31f987f2fd4c/rlp-4.1.0.tar.gz", hash = "sha256:be07564270a96f3e225e2c107db263de96b5bc1f27722d2855bd3459a08e95a9", size = 33429, upload-time = "2025-02-04T22:05:59.089Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/99/fb/e4c0ced9893b84ac95b7181d69a9786ce5879aeb3bbbcbba80a164f85d6a/rlp-4.1.0-py3-none-any.whl", hash = "sha256:8eca394c579bad34ee0b937aecb96a57052ff3716e19c7a578883e767bc5da6f", size = 19973, upload-time = "2025-02-04T22:05:57.05Z" }, +] + +[[package]] +name = "ruff" +version = "0.15.6" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/51/df/f8629c19c5318601d3121e230f74cbee7a3732339c52b21daa2b82ef9c7d/ruff-0.15.6.tar.gz", hash = "sha256:8394c7bb153a4e3811a4ecdacd4a8e6a4fa8097028119160dffecdcdf9b56ae4", size = 4597916, upload-time = "2026-03-12T23:05:47.51Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/9e/2f/4e03a7e5ce99b517e98d3b4951f411de2b0fa8348d39cf446671adcce9a2/ruff-0.15.6-py3-none-linux_armv6l.whl", hash = "sha256:7c98c3b16407b2cf3d0f2b80c80187384bc92c6774d85fefa913ecd941256fff", size = 10508953, upload-time = "2026-03-12T23:05:17.246Z" }, + { url = "https://files.pythonhosted.org/packages/70/60/55bcdc3e9f80bcf39edf0cd272da6fa511a3d94d5a0dd9e0adf76ceebdb4/ruff-0.15.6-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:ee7dcfaad8b282a284df4aa6ddc2741b3f4a18b0555d626805555a820ea181c3", size = 10942257, upload-time = "2026-03-12T23:05:23.076Z" }, + { url = "https://files.pythonhosted.org/packages/e7/f9/005c29bd1726c0f492bfa215e95154cf480574140cb5f867c797c18c790b/ruff-0.15.6-py3-none-macosx_11_0_arm64.whl", hash = "sha256:3bd9967851a25f038fc8b9ae88a7fbd1b609f30349231dffaa37b6804923c4bb", size = 10322683, upload-time = "2026-03-12T23:05:33.738Z" }, + { url = 
"https://files.pythonhosted.org/packages/5f/74/2f861f5fd7cbb2146bddb5501450300ce41562da36d21868c69b7a828169/ruff-0.15.6-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:13f4594b04e42cd24a41da653886b04d2ff87adbf57497ed4f728b0e8a4866f8", size = 10660986, upload-time = "2026-03-12T23:05:53.245Z" }, + { url = "https://files.pythonhosted.org/packages/c1/a1/309f2364a424eccb763cdafc49df843c282609f47fe53aa83f38272389e0/ruff-0.15.6-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e2ed8aea2f3fe57886d3f00ea5b8aae5bf68d5e195f487f037a955ff9fbaac9e", size = 10332177, upload-time = "2026-03-12T23:05:56.145Z" }, + { url = "https://files.pythonhosted.org/packages/30/41/7ebf1d32658b4bab20f8ac80972fb19cd4e2c6b78552be263a680edc55ac/ruff-0.15.6-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:70789d3e7830b848b548aae96766431c0dc01a6c78c13381f423bf7076c66d15", size = 11170783, upload-time = "2026-03-12T23:06:01.742Z" }, + { url = "https://files.pythonhosted.org/packages/76/be/6d488f6adca047df82cd62c304638bcb00821c36bd4881cfca221561fdfc/ruff-0.15.6-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:542aaf1de3154cea088ced5a819ce872611256ffe2498e750bbae5247a8114e9", size = 12044201, upload-time = "2026-03-12T23:05:28.697Z" }, + { url = "https://files.pythonhosted.org/packages/71/68/e6f125df4af7e6d0b498f8d373274794bc5156b324e8ab4bf5c1b4fc0ec7/ruff-0.15.6-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1c22e6f02c16cfac3888aa636e9eba857254d15bbacc9906c9689fdecb1953ab", size = 11421561, upload-time = "2026-03-12T23:05:31.236Z" }, + { url = "https://files.pythonhosted.org/packages/f1/9f/f85ef5fd01a52e0b472b26dc1b4bd228b8f6f0435975442ffa4741278703/ruff-0.15.6-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:98893c4c0aadc8e448cfa315bd0cc343a5323d740fe5f28ef8a3f9e21b381f7e", size = 11310928, upload-time = "2026-03-12T23:05:45.288Z" }, + { url = 
"https://files.pythonhosted.org/packages/8c/26/b75f8c421f5654304b89471ed384ae8c7f42b4dff58fa6ce1626d7f2b59a/ruff-0.15.6-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:70d263770d234912374493e8cc1e7385c5d49376e41dfa51c5c3453169dc581c", size = 11235186, upload-time = "2026-03-12T23:05:50.677Z" }, + { url = "https://files.pythonhosted.org/packages/fc/d4/d5a6d065962ff7a68a86c9b4f5500f7d101a0792078de636526c0edd40da/ruff-0.15.6-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:55a1ad63c5a6e54b1f21b7514dfadc0c7fb40093fa22e95143cf3f64ebdcd512", size = 10635231, upload-time = "2026-03-12T23:05:37.044Z" }, + { url = "https://files.pythonhosted.org/packages/d6/56/7c3acf3d50910375349016cf33de24be021532042afbed87942858992491/ruff-0.15.6-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:8dc473ba093c5ec238bb1e7429ee676dca24643c471e11fbaa8a857925b061c0", size = 10340357, upload-time = "2026-03-12T23:06:04.748Z" }, + { url = "https://files.pythonhosted.org/packages/06/54/6faa39e9c1033ff6a3b6e76b5df536931cd30caf64988e112bbf91ef5ce5/ruff-0.15.6-py3-none-musllinux_1_2_i686.whl", hash = "sha256:85b042377c2a5561131767974617006f99f7e13c63c111b998f29fc1e58a4cfb", size = 10860583, upload-time = "2026-03-12T23:05:58.978Z" }, + { url = "https://files.pythonhosted.org/packages/cb/1e/509a201b843b4dfb0b32acdedf68d951d3377988cae43949ba4c4133a96a/ruff-0.15.6-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:cef49e30bc5a86a6a92098a7fbf6e467a234d90b63305d6f3ec01225a9d092e0", size = 11410976, upload-time = "2026-03-12T23:05:39.955Z" }, + { url = "https://files.pythonhosted.org/packages/6c/25/3fc9114abf979a41673ce877c08016f8e660ad6cf508c3957f537d2e9fa9/ruff-0.15.6-py3-none-win32.whl", hash = "sha256:bbf67d39832404812a2d23020dda68fee7f18ce15654e96fb1d3ad21a5fe436c", size = 10616872, upload-time = "2026-03-12T23:05:42.451Z" }, + { url = "https://files.pythonhosted.org/packages/89/7a/09ece68445ceac348df06e08bf75db72d0e8427765b96c9c0ffabc1be1d9/ruff-0.15.6-py3-none-win_amd64.whl", hash = 
"sha256:aee25bc84c2f1007ecb5037dff75cef00414fdf17c23f07dc13e577883dca406", size = 11787271, upload-time = "2026-03-12T23:05:20.168Z" }, + { url = "https://files.pythonhosted.org/packages/7f/d0/578c47dd68152ddddddf31cd7fc67dc30b7cdf639a86275fda821b0d9d98/ruff-0.15.6-py3-none-win_arm64.whl", hash = "sha256:c34de3dd0b0ba203be50ae70f5910b17188556630e2178fd7d79fc030eb0d837", size = 11060497, upload-time = "2026-03-12T23:05:25.968Z" }, +] + +[[package]] +name = "six" +version = "1.17.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/94/e7/b2c673351809dca68a0e064b6af791aa332cf192da575fd474ed7d6f16a2/six-1.17.0.tar.gz", hash = "sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81", size = 34031, upload-time = "2024-12-04T17:35:28.174Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b7/ce/149a00dd41f10bc29e5921b496af8b574d8413afcd5e30dfa0ed46c2cc5e/six-1.17.0-py2.py3-none-any.whl", hash = "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274", size = 11050, upload-time = "2024-12-04T17:35:26.475Z" }, +] + +[[package]] +name = "textual" +version = "8.1.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "markdown-it-py", extra = ["linkify"] }, + { name = "mdit-py-plugins" }, + { name = "platformdirs" }, + { name = "pygments" }, + { name = "rich" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/72/23/8c709655c5f2208ee82ab81b8104802421865535c278a7649b842b129db1/textual-8.1.1.tar.gz", hash = "sha256:eef0256a6131f06a20ad7576412138c1f30f92ddeedd055953c08d97044bc317", size = 1843002, upload-time = "2026-03-10T10:01:38.493Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/50/21/421b02bf5943172b7a9320712a5e0d74a02a8f7597284e3f8b5b06c70b8d/textual-8.1.1-py3-none-any.whl", hash = "sha256:6712f96e335cd782e76193dee16b9c8875fe0699d923bc8d3f1228fd23e773a6", size = 719598, 
upload-time = "2026-03-10T10:01:48.318Z" }, +] + +[[package]] +name = "toolz" +version = "1.1.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/11/d6/114b492226588d6ff54579d95847662fc69196bdeec318eb45393b24c192/toolz-1.1.0.tar.gz", hash = "sha256:27a5c770d068c110d9ed9323f24f1543e83b2f300a687b7891c1a6d56b697b5b", size = 52613, upload-time = "2025-10-17T04:03:21.661Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/fb/12/5911ae3eeec47800503a238d971e51722ccea5feb8569b735184d5fcdbc0/toolz-1.1.0-py3-none-any.whl", hash = "sha256:15ccc861ac51c53696de0a5d6d4607f99c210739caf987b5d2054f3efed429d8", size = 58093, upload-time = "2025-10-17T04:03:20.435Z" }, +] + +[[package]] +name = "typing-extensions" +version = "4.15.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/72/94/1a15dd82efb362ac84269196e94cf00f187f7ed21c242792a923cdb1c61f/typing_extensions-4.15.0.tar.gz", hash = "sha256:0cea48d173cc12fa28ecabc3b837ea3cf6f38c6d1136f85cbaaf598984861466", size = 109391, upload-time = "2025-08-25T13:49:26.313Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/18/67/36e9267722cc04a6b9f15c7f3441c2363321a3ea07da7ae0c0707beb2a9c/typing_extensions-4.15.0-py3-none-any.whl", hash = "sha256:f0fa19c6845758ab08074a0cfa8b7aecb71c999ca73d62883bc25cc018c4e548", size = 44614, upload-time = "2025-08-25T13:49:24.86Z" }, +] + +[[package]] +name = "typing-inspection" +version = "0.4.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/55/e3/70399cb7dd41c10ac53367ae42139cf4b1ca5f36bb3dc6c9d33acdb43655/typing_inspection-0.4.2.tar.gz", hash = "sha256:ba561c48a67c5958007083d386c3295464928b01faa735ab8547c5692e87f464", size = 75949, upload-time = "2025-10-01T02:14:41.687Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/dc/9b/47798a6c91d8bdb567fe2698fe81e0c6b7cb7ef4d13da4114b41d239f65d/typing_inspection-0.4.2-py3-none-any.whl", hash = "sha256:4ed1cacbdc298c220f1bd249ed5287caa16f34d44ef4e9c3d0cbad5b521545e7", size = 14611, upload-time = "2025-10-01T02:14:40.154Z" }, +] + +[[package]] +name = "tzdata" +version = "2025.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/5e/a7/c202b344c5ca7daf398f3b8a477eeb205cf3b6f32e7ec3a6bac0629ca975/tzdata-2025.3.tar.gz", hash = "sha256:de39c2ca5dc7b0344f2eba86f49d614019d29f060fc4ebc8a417896a620b56a7", size = 196772, upload-time = "2025-12-13T17:45:35.667Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c7/b0/003792df09decd6849a5e39c28b513c06e84436a54440380862b5aeff25d/tzdata-2025.3-py2.py3-none-any.whl", hash = "sha256:06a47e5700f3081aab02b2e513160914ff0694bce9947d6b76ebd6bf57cfc5d1", size = 348521, upload-time = "2025-12-13T17:45:33.889Z" }, +] + +[[package]] +name = "uc-micro-py" +version = "2.0.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/78/67/9a363818028526e2d4579334460df777115bdec1bb77c08f9db88f6389f2/uc_micro_py-2.0.0.tar.gz", hash = "sha256:c53691e495c8db60e16ffc4861a35469b0ba0821fe409a8a7a0a71864d33a811", size = 6611, upload-time = "2026-03-01T06:31:27.526Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/61/73/d21edf5b204d1467e06500080a50f79d49ef2b997c79123a536d4a17d97c/uc_micro_py-2.0.0-py3-none-any.whl", hash = "sha256:3603a3859af53e5a39bc7677713c78ea6589ff188d70f4fee165db88e22b242c", size = 6383, upload-time = "2026-03-01T06:31:26.257Z" }, +] + +[[package]] +name = "urllib3" +version = "2.6.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/c7/24/5f1b3bdffd70275f6661c76461e25f024d5a38a46f04aaca912426a2b1d3/urllib3-2.6.3.tar.gz", hash = 
"sha256:1b62b6884944a57dbe321509ab94fd4d3b307075e0c2eae991ac71ee15ad38ed", size = 435556, upload-time = "2026-01-07T16:24:43.925Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/39/08/aaaad47bc4e9dc8c725e68f9d04865dbcb2052843ff09c97b08904852d84/urllib3-2.6.3-py3-none-any.whl", hash = "sha256:bf272323e553dfb2e87d9bfd225ca7b0f467b919d7bbd355436d3fd37cb0acd4", size = 131584, upload-time = "2026-01-07T16:24:42.685Z" }, +] + +[[package]] +name = "websocket-client" +version = "1.9.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/2c/41/aa4bf9664e4cda14c3b39865b12251e8e7d239f4cd0e3cc1b6c2ccde25c1/websocket_client-1.9.0.tar.gz", hash = "sha256:9e813624b6eb619999a97dc7958469217c3176312b3a16a4bd1bc7e08a46ec98", size = 70576, upload-time = "2025-10-07T21:16:36.495Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/34/db/b10e48aa8fff7407e67470363eac595018441cf32d5e1001567a7aeba5d2/websocket_client-1.9.0-py3-none-any.whl", hash = "sha256:af248a825037ef591efbf6ed20cc5faa03d3b47b9e5a2230a529eeee1c1fc3ef", size = 82616, upload-time = "2025-10-07T21:16:34.951Z" }, +] + +[[package]] +name = "websockets" +version = "16.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/04/24/4b2031d72e840ce4c1ccb255f693b15c334757fc50023e4db9537080b8c4/websockets-16.0.tar.gz", hash = "sha256:5f6261a5e56e8d5c42a4497b364ea24d94d9563e8fbd44e78ac40879c60179b5", size = 179346, upload-time = "2026-01-10T09:23:47.181Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/84/7b/bac442e6b96c9d25092695578dda82403c77936104b5682307bd4deb1ad4/websockets-16.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:71c989cbf3254fbd5e84d3bff31e4da39c43f884e64f2551d14bb3c186230f00", size = 177365, upload-time = "2026-01-10T09:22:46.787Z" }, + { url = 
"https://files.pythonhosted.org/packages/b0/fe/136ccece61bd690d9c1f715baaeefd953bb2360134de73519d5df19d29ca/websockets-16.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:8b6e209ffee39ff1b6d0fa7bfef6de950c60dfb91b8fcead17da4ee539121a79", size = 175038, upload-time = "2026-01-10T09:22:47.999Z" }, + { url = "https://files.pythonhosted.org/packages/40/1e/9771421ac2286eaab95b8575b0cb701ae3663abf8b5e1f64f1fd90d0a673/websockets-16.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:86890e837d61574c92a97496d590968b23c2ef0aeb8a9bc9421d174cd378ae39", size = 175328, upload-time = "2026-01-10T09:22:49.809Z" }, + { url = "https://files.pythonhosted.org/packages/18/29/71729b4671f21e1eaa5d6573031ab810ad2936c8175f03f97f3ff164c802/websockets-16.0-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:9b5aca38b67492ef518a8ab76851862488a478602229112c4b0d58d63a7a4d5c", size = 184915, upload-time = "2026-01-10T09:22:51.071Z" }, + { url = "https://files.pythonhosted.org/packages/97/bb/21c36b7dbbafc85d2d480cd65df02a1dc93bf76d97147605a8e27ff9409d/websockets-16.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e0334872c0a37b606418ac52f6ab9cfd17317ac26365f7f65e203e2d0d0d359f", size = 186152, upload-time = "2026-01-10T09:22:52.224Z" }, + { url = "https://files.pythonhosted.org/packages/4a/34/9bf8df0c0cf88fa7bfe36678dc7b02970c9a7d5e065a3099292db87b1be2/websockets-16.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:a0b31e0b424cc6b5a04b8838bbaec1688834b2383256688cf47eb97412531da1", size = 185583, upload-time = "2026-01-10T09:22:53.443Z" }, + { url = "https://files.pythonhosted.org/packages/47/88/4dd516068e1a3d6ab3c7c183288404cd424a9a02d585efbac226cb61ff2d/websockets-16.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:485c49116d0af10ac698623c513c1cc01c9446c058a4e61e3bf6c19dff7335a2", size = 184880, upload-time = "2026-01-10T09:22:55.033Z" }, + { url = 
"https://files.pythonhosted.org/packages/91/d6/7d4553ad4bf1c0421e1ebd4b18de5d9098383b5caa1d937b63df8d04b565/websockets-16.0-cp312-cp312-win32.whl", hash = "sha256:eaded469f5e5b7294e2bdca0ab06becb6756ea86894a47806456089298813c89", size = 178261, upload-time = "2026-01-10T09:22:56.251Z" }, + { url = "https://files.pythonhosted.org/packages/c3/f0/f3a17365441ed1c27f850a80b2bc680a0fa9505d733fe152fdf5e98c1c0b/websockets-16.0-cp312-cp312-win_amd64.whl", hash = "sha256:5569417dc80977fc8c2d43a86f78e0a5a22fee17565d78621b6bb264a115d4ea", size = 178693, upload-time = "2026-01-10T09:22:57.478Z" }, + { url = "https://files.pythonhosted.org/packages/cc/9c/baa8456050d1c1b08dd0ec7346026668cbc6f145ab4e314d707bb845bf0d/websockets-16.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:878b336ac47938b474c8f982ac2f7266a540adc3fa4ad74ae96fea9823a02cc9", size = 177364, upload-time = "2026-01-10T09:22:59.333Z" }, + { url = "https://files.pythonhosted.org/packages/7e/0c/8811fc53e9bcff68fe7de2bcbe75116a8d959ac699a3200f4847a8925210/websockets-16.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:52a0fec0e6c8d9a784c2c78276a48a2bdf099e4ccc2a4cad53b27718dbfd0230", size = 175039, upload-time = "2026-01-10T09:23:01.171Z" }, + { url = "https://files.pythonhosted.org/packages/aa/82/39a5f910cb99ec0b59e482971238c845af9220d3ab9fa76dd9162cda9d62/websockets-16.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:e6578ed5b6981005df1860a56e3617f14a6c307e6a71b4fff8c48fdc50f3ed2c", size = 175323, upload-time = "2026-01-10T09:23:02.341Z" }, + { url = "https://files.pythonhosted.org/packages/bd/28/0a25ee5342eb5d5f297d992a77e56892ecb65e7854c7898fb7d35e9b33bd/websockets-16.0-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:95724e638f0f9c350bb1c2b0a7ad0e83d9cc0c9259f3ea94e40d7b02a2179ae5", size = 184975, upload-time = "2026-01-10T09:23:03.756Z" }, + { url = 
"https://files.pythonhosted.org/packages/f9/66/27ea52741752f5107c2e41fda05e8395a682a1e11c4e592a809a90c6a506/websockets-16.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c0204dc62a89dc9d50d682412c10b3542d748260d743500a85c13cd1ee4bde82", size = 186203, upload-time = "2026-01-10T09:23:05.01Z" }, + { url = "https://files.pythonhosted.org/packages/37/e5/8e32857371406a757816a2b471939d51c463509be73fa538216ea52b792a/websockets-16.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:52ac480f44d32970d66763115edea932f1c5b1312de36df06d6b219f6741eed8", size = 185653, upload-time = "2026-01-10T09:23:06.301Z" }, + { url = "https://files.pythonhosted.org/packages/9b/67/f926bac29882894669368dc73f4da900fcdf47955d0a0185d60103df5737/websockets-16.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6e5a82b677f8f6f59e8dfc34ec06ca6b5b48bc4fcda346acd093694cc2c24d8f", size = 184920, upload-time = "2026-01-10T09:23:07.492Z" }, + { url = "https://files.pythonhosted.org/packages/3c/a1/3d6ccdcd125b0a42a311bcd15a7f705d688f73b2a22d8cf1c0875d35d34a/websockets-16.0-cp313-cp313-win32.whl", hash = "sha256:abf050a199613f64c886ea10f38b47770a65154dc37181bfaff70c160f45315a", size = 178255, upload-time = "2026-01-10T09:23:09.245Z" }, + { url = "https://files.pythonhosted.org/packages/6b/ae/90366304d7c2ce80f9b826096a9e9048b4bb760e44d3b873bb272cba696b/websockets-16.0-cp313-cp313-win_amd64.whl", hash = "sha256:3425ac5cf448801335d6fdc7ae1eb22072055417a96cc6b31b3861f455fbc156", size = 178689, upload-time = "2026-01-10T09:23:10.483Z" }, + { url = "https://files.pythonhosted.org/packages/f3/1d/e88022630271f5bd349ed82417136281931e558d628dd52c4d8621b4a0b2/websockets-16.0-cp314-cp314-macosx_10_15_universal2.whl", hash = "sha256:8cc451a50f2aee53042ac52d2d053d08bf89bcb31ae799cb4487587661c038a0", size = 177406, upload-time = "2026-01-10T09:23:12.178Z" }, + { url = 
"https://files.pythonhosted.org/packages/f2/78/e63be1bf0724eeb4616efb1ae1c9044f7c3953b7957799abb5915bffd38e/websockets-16.0-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:daa3b6ff70a9241cf6c7fc9e949d41232d9d7d26fd3522b1ad2b4d62487e9904", size = 175085, upload-time = "2026-01-10T09:23:13.511Z" }, + { url = "https://files.pythonhosted.org/packages/bb/f4/d3c9220d818ee955ae390cf319a7c7a467beceb24f05ee7aaaa2414345ba/websockets-16.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:fd3cb4adb94a2a6e2b7c0d8d05cb94e6f1c81a0cf9dc2694fb65c7e8d94c42e4", size = 175328, upload-time = "2026-01-10T09:23:14.727Z" }, + { url = "https://files.pythonhosted.org/packages/63/bc/d3e208028de777087e6fb2b122051a6ff7bbcca0d6df9d9c2bf1dd869ae9/websockets-16.0-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:781caf5e8eee67f663126490c2f96f40906594cb86b408a703630f95550a8c3e", size = 185044, upload-time = "2026-01-10T09:23:15.939Z" }, + { url = "https://files.pythonhosted.org/packages/ad/6e/9a0927ac24bd33a0a9af834d89e0abc7cfd8e13bed17a86407a66773cc0e/websockets-16.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:caab51a72c51973ca21fa8a18bd8165e1a0183f1ac7066a182ff27107b71e1a4", size = 186279, upload-time = "2026-01-10T09:23:17.148Z" }, + { url = "https://files.pythonhosted.org/packages/b9/ca/bf1c68440d7a868180e11be653c85959502efd3a709323230314fda6e0b3/websockets-16.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:19c4dc84098e523fd63711e563077d39e90ec6702aff4b5d9e344a60cb3c0cb1", size = 185711, upload-time = "2026-01-10T09:23:18.372Z" }, + { url = "https://files.pythonhosted.org/packages/c4/f8/fdc34643a989561f217bb477cbc47a3a07212cbda91c0e4389c43c296ebf/websockets-16.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:a5e18a238a2b2249c9a9235466b90e96ae4795672598a58772dd806edc7ac6d3", size = 184982, upload-time = "2026-01-10T09:23:19.652Z" }, + { url = 
"https://files.pythonhosted.org/packages/dd/d1/574fa27e233764dbac9c52730d63fcf2823b16f0856b3329fc6268d6ae4f/websockets-16.0-cp314-cp314-win32.whl", hash = "sha256:a069d734c4a043182729edd3e9f247c3b2a4035415a9172fd0f1b71658a320a8", size = 177915, upload-time = "2026-01-10T09:23:21.458Z" }, + { url = "https://files.pythonhosted.org/packages/8a/f1/ae6b937bf3126b5134ce1f482365fde31a357c784ac51852978768b5eff4/websockets-16.0-cp314-cp314-win_amd64.whl", hash = "sha256:c0ee0e63f23914732c6d7e0cce24915c48f3f1512ec1d079ed01fc629dab269d", size = 178381, upload-time = "2026-01-10T09:23:22.715Z" }, + { url = "https://files.pythonhosted.org/packages/06/9b/f791d1db48403e1f0a27577a6beb37afae94254a8c6f08be4a23e4930bc0/websockets-16.0-cp314-cp314t-macosx_10_15_universal2.whl", hash = "sha256:a35539cacc3febb22b8f4d4a99cc79b104226a756aa7400adc722e83b0d03244", size = 177737, upload-time = "2026-01-10T09:23:24.523Z" }, + { url = "https://files.pythonhosted.org/packages/bd/40/53ad02341fa33b3ce489023f635367a4ac98b73570102ad2cdd770dacc9a/websockets-16.0-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:b784ca5de850f4ce93ec85d3269d24d4c82f22b7212023c974c401d4980ebc5e", size = 175268, upload-time = "2026-01-10T09:23:25.781Z" }, + { url = "https://files.pythonhosted.org/packages/74/9b/6158d4e459b984f949dcbbb0c5d270154c7618e11c01029b9bbd1bb4c4f9/websockets-16.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:569d01a4e7fba956c5ae4fc988f0d4e187900f5497ce46339c996dbf24f17641", size = 175486, upload-time = "2026-01-10T09:23:27.033Z" }, + { url = "https://files.pythonhosted.org/packages/e5/2d/7583b30208b639c8090206f95073646c2c9ffd66f44df967981a64f849ad/websockets-16.0-cp314-cp314t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:50f23cdd8343b984957e4077839841146f67a3d31ab0d00e6b824e74c5b2f6e8", size = 185331, upload-time = "2026-01-10T09:23:28.259Z" }, + { url = 
"https://files.pythonhosted.org/packages/45/b0/cce3784eb519b7b5ad680d14b9673a31ab8dcb7aad8b64d81709d2430aa8/websockets-16.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:152284a83a00c59b759697b7f9e9cddf4e3c7861dd0d964b472b70f78f89e80e", size = 186501, upload-time = "2026-01-10T09:23:29.449Z" }, + { url = "https://files.pythonhosted.org/packages/19/60/b8ebe4c7e89fb5f6cdf080623c9d92789a53636950f7abacfc33fe2b3135/websockets-16.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:bc59589ab64b0022385f429b94697348a6a234e8ce22544e3681b2e9331b5944", size = 186062, upload-time = "2026-01-10T09:23:31.368Z" }, + { url = "https://files.pythonhosted.org/packages/88/a8/a080593f89b0138b6cba1b28f8df5673b5506f72879322288b031337c0b8/websockets-16.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:32da954ffa2814258030e5a57bc73a3635463238e797c7375dc8091327434206", size = 185356, upload-time = "2026-01-10T09:23:32.627Z" }, + { url = "https://files.pythonhosted.org/packages/c2/b6/b9afed2afadddaf5ebb2afa801abf4b0868f42f8539bfe4b071b5266c9fe/websockets-16.0-cp314-cp314t-win32.whl", hash = "sha256:5a4b4cc550cb665dd8a47f868c8d04c8230f857363ad3c9caf7a0c3bf8c61ca6", size = 178085, upload-time = "2026-01-10T09:23:33.816Z" }, + { url = "https://files.pythonhosted.org/packages/9f/3e/28135a24e384493fa804216b79a6a6759a38cc4ff59118787b9fb693df93/websockets-16.0-cp314-cp314t-win_amd64.whl", hash = "sha256:b14dc141ed6d2dde437cddb216004bcac6a1df0935d79656387bd41632ba0bbd", size = 178531, upload-time = "2026-01-10T09:23:35.016Z" }, + { url = "https://files.pythonhosted.org/packages/6f/28/258ebab549c2bf3e64d2b0217b973467394a9cea8c42f70418ca2c5d0d2e/websockets-16.0-py3-none-any.whl", hash = "sha256:1637db62fad1dc833276dded54215f2c7fa46912301a24bd94d45d46a011ceec", size = 171598, upload-time = "2026-01-10T09:23:45.395Z" }, +] From 0329804f7191fd574a827d34c282827f5fae9899 Mon Sep 17 00:00:00 2001 From: morfize 
<233522679+morfize@users.noreply.github.com> Date: Thu, 19 Mar 2026 02:39:25 -0700 Subject: [PATCH 2/8] fix: use actual trade size in stop-loss check instead of regime-dependent recomputation MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit check_stop_loss was calling compute_position_size() which uses the current prediction regime, not the trade's actual notional. If the regime shifted after entry (e.g. NORMAL→CRISIS), the method returned 0 and PnL was always 0 — the stop loss would never fire. Now accepts size_usd as a parameter so the caller passes the real trade size. Co-Authored-By: Claude Opus 4.6 (1M context) --- src/perp_bot/risk/manager.py | 9 +++++---- tests/test_risk.py | 11 +++++++++-- 2 files changed, 14 insertions(+), 6 deletions(-) diff --git a/src/perp_bot/risk/manager.py b/src/perp_bot/risk/manager.py index 56387b8..039cb98 100644 --- a/src/perp_bot/risk/manager.py +++ b/src/perp_bot/risk/manager.py @@ -65,16 +65,17 @@ def compute_position_size( size *= self.config.prediction.position_size_reduction return size - def check_stop_loss(self, entry_price: float, current_price: float, side: str) -> bool: + def check_stop_loss( + self, entry_price: float, current_price: float, side: str, size_usd: float, + ) -> bool: """Check if the capital-based stop-loss threshold is breached. Returns True if the position should be stopped out. 
""" - position_size = self.compute_position_size() if side == "long": - pnl = (current_price - entry_price) / entry_price * position_size + pnl = (current_price - entry_price) / entry_price * size_usd else: - pnl = (entry_price - current_price) / entry_price * position_size + pnl = (entry_price - current_price) / entry_price * size_usd max_loss = self.trading.capital_usd * self.risk.max_loss_per_trade_pct return pnl <= -max_loss diff --git a/tests/test_risk.py b/tests/test_risk.py index 5f93b6d..fe2dbb6 100644 --- a/tests/test_risk.py +++ b/tests/test_risk.py @@ -78,9 +78,16 @@ def test_stop_loss_triggers(self): db = Database(":memory:") rm = RiskManager(config, db) # Long position: entry 3000, current 2950 → loss = 50/3000 * 1005 = $16.75 - assert not rm.check_stop_loss(3000.0, 2950.0, "long") + assert not rm.check_stop_loss(3000.0, 2950.0, "long", 1005.0) # Entry 3000, current 2900 → loss = 100/3000 * 1005 = $33.50 > $20.10 (3% of 670) - assert rm.check_stop_loss(3000.0, 2900.0, "long") + assert rm.check_stop_loss(3000.0, 2900.0, "long", 1005.0) + + def test_stop_loss_uses_actual_trade_size(self): + config = _test_config() + db = Database(":memory:") + rm = RiskManager(config, db) + # Same 3.33% move, but only half-sized notional -> $16.75 loss, below threshold. + assert not rm.check_stop_loss(3000.0, 2900.0, "long", 502.5) def test_cooldown_blocks_entry(self): config = _test_config() From 49591f8a6bff86cfec2b3b9a8df20ac670a02b66 Mon Sep 17 00:00:00 2001 From: morfize <233522679+morfize@users.noreply.github.com> Date: Thu, 19 Mar 2026 02:39:36 -0700 Subject: [PATCH 3/8] fix: add descending mode to get_candles so screen fetches latest window The screen command fetched the oldest 500 candles (ASC + LIMIT) rather than the most recent. Adding descending=True queries ORDER BY DESC then reverses the result to maintain chronological order, ensuring Hurst exponent calculations use current market data. 
Co-Authored-By: Claude Opus 4.6 (1M context) --- src/perp_bot/data/db.py | 9 +++++++-- tests/test_db.py | 26 ++++++++++++++++++++++++++ 2 files changed, 33 insertions(+), 2 deletions(-) create mode 100644 tests/test_db.py diff --git a/src/perp_bot/data/db.py b/src/perp_bot/data/db.py index d683ea3..cb1e9b3 100644 --- a/src/perp_bot/data/db.py +++ b/src/perp_bot/data/db.py @@ -98,6 +98,7 @@ def get_candles( timeframe: str, start_time: int | None = None, limit: int = 5000, + descending: bool = False, ) -> list[dict]: """Fetch candles ordered by open_time ascending.""" query = "SELECT * FROM candles WHERE symbol = ? AND timeframe = ?" @@ -105,11 +106,15 @@ def get_candles( if start_time is not None: query += " AND open_time >= ?" params.append(start_time) - query += " ORDER BY open_time ASC LIMIT ?" + order = "DESC" if descending else "ASC" + query += f" ORDER BY open_time {order} LIMIT ?" params.append(limit) cur = self.conn.execute(query, params) cols = [d[0] for d in cur.description] - return [dict(zip(cols, row)) for row in cur.fetchall()] + rows = [dict(zip(cols, row)) for row in cur.fetchall()] + if descending: + rows.reverse() + return rows def get_latest_candle_time(self, symbol: str, timeframe: str) -> int | None: """Return the most recent open_time for incremental fetching.""" diff --git a/tests/test_db.py b/tests/test_db.py new file mode 100644 index 0000000..c544ed2 --- /dev/null +++ b/tests/test_db.py @@ -0,0 +1,26 @@ +"""Tests for database access helpers.""" + +from perp_bot.data.db import Database + + +def test_get_candles_descending_returns_latest_window_in_ascending_order(): + db = Database(":memory:") + db.insert_candles([ + { + "symbol": "ETH", + "timeframe": "15m", + "open_time": open_time, + "open": float(open_time), + "high": float(open_time) + 1, + "low": float(open_time) - 1, + "close": float(open_time), + "volume": 100.0, + "num_trades": 10, + } + for open_time in (1_000, 2_000, 3_000, 4_000) + ]) + + candles = db.get_candles("ETH", "15m", 
limit=2, descending=True) + + assert [c["open_time"] for c in candles] == [3_000, 4_000] + db.close() From 7b6367e55b2eeee226817dd11ec9d6f326ebd06d Mon Sep 17 00:00:00 2001 From: morfize <233522679+morfize@users.noreply.github.com> Date: Thu, 19 Mar 2026 02:39:47 -0700 Subject: [PATCH 4/8] fix: filter paper trades only in paper-vs-backtest comparison compare_paper_vs_backtest included live trades in the paper bucket, inflating or deflating paper metrics. Now filters on is_paper == 1 so the comparison accurately reflects simulated-only performance. Co-Authored-By: Claude Opus 4.6 (1M context) --- src/perp_bot/reporting/compare.py | 5 +- tests/test_compare.py | 90 +++++++++++++++++++++++++++++++ 2 files changed, 94 insertions(+), 1 deletion(-) create mode 100644 tests/test_compare.py diff --git a/src/perp_bot/reporting/compare.py b/src/perp_bot/reporting/compare.py index 0b4ba71..194bf70 100644 --- a/src/perp_bot/reporting/compare.py +++ b/src/perp_bot/reporting/compare.py @@ -27,7 +27,10 @@ def compare_paper_vs_backtest( # --- Paper trade metrics --- paper_trades = db.get_closed_trades_in_range(start_ms, end_ms) - paper_trades = [t for t in paper_trades if t["symbol"] == symbol] + paper_trades = [ + t for t in paper_trades + if t["symbol"] == symbol and t["is_paper"] == 1 + ] paper_stats = _compute_stats(paper_trades, "Paper") # --- Backtest over the same range --- diff --git a/tests/test_compare.py b/tests/test_compare.py new file mode 100644 index 0000000..3b7a98b --- /dev/null +++ b/tests/test_compare.py @@ -0,0 +1,90 @@ +"""Tests for paper-vs-backtest comparison reporting.""" + +from __future__ import annotations + +from types import SimpleNamespace + +from perp_bot.config import ( + BotConfig, + DataConfig, + ExecutionConfig, + RiskConfig, + SignalConfig, + TradingConfig, +) +from perp_bot.data.db import Database +from perp_bot.reporting.compare import compare_paper_vs_backtest + + +def _make_config() -> BotConfig: + return BotConfig( + trading=TradingConfig( + 
symbols=["ETH"], leverage=3, capital_usd=670.0, margin_usage_limit=0.5, + ), + signals=SignalConfig( + zscore_lookback=20, zscore_entry_threshold=2.0, zscore_exit_threshold=0.3, + zscore_stop_threshold=3.0, bollinger_period=20, bollinger_std=2.0, + rsi_period=14, rsi_overbought=70, rsi_oversold=30, + adx_period=14, adx_threshold=25, + ), + risk=RiskConfig( + max_loss_per_trade_pct=0.03, daily_loss_limit_pct=0.08, + max_positions=1, cooldown_seconds=1800, position_timeout_hours=24, + ), + data=DataConfig( + timeframes=["15m"], primary_timeframe="15m", history_days=90, + db_path=":memory:", + ), + execution=ExecutionConfig( + order_type="limit", taker_fallback_seconds=30, use_server_side_stop=True, + ), + mode="paper", + ) + + +def test_compare_filters_out_live_trades(monkeypatch): + config = _make_config() + db = Database(":memory:") + + paper_trade_id = db.insert_trade({ + "symbol": "ETH", + "side": "long", + "entry_time": 1_000, + "entry_price": 2_500.0, + "size_usd": 1_000.0, + "is_paper": 1, + }) + db.close_trade(paper_trade_id, 2_000, 2_550.0, 20.0, "paper_win") + + live_trade_id = db.insert_trade({ + "symbol": "ETH", + "side": "long", + "entry_time": 3_000, + "entry_price": 2_500.0, + "size_usd": 1_000.0, + "is_paper": 0, + }) + db.close_trade(live_trade_id, 4_000, 2_350.0, -60.0, "live_loss") + + class FakeBacktestEngine: + def __init__(self, *_args, **_kwargs): + pass + + def run(self, *_args, **_kwargs): + return SimpleNamespace(trades=[]) + + monkeypatch.setattr("perp_bot.reporting.compare.BacktestEngine", FakeBacktestEngine) + + report = compare_paper_vs_backtest( + config, db, "ETH", start_ms=0, end_ms=10_000, + ) + + lines = { + line.split()[0]: line.split() + for line in report.splitlines() + if line and line.split()[0] in {"trades", "net_pnl"} + } + + assert lines["trades"][1:] == ["1", "0", "+1"] + assert lines["net_pnl"][1:] == ["20.00$", "0.00$", "+20.00$"] + db.close() From c44ca5488670fa20796938651f16437e75713a52 Mon Sep 17 00:00:00 2001 From: 
morfize <233522679+morfize@users.noreply.github.com> Date: Thu, 19 Mar 2026 02:39:59 -0700 Subject: [PATCH 5/8] fix: graceful losing-weeks halt, failed entry handling, per-symbol exports Three improvements to main.py: 1. Losing-weeks guard no longer calls sys.exit(1) when live positions are open. Instead sets entries_halted=True and continues managing existing positions, preventing abandoned positions on the exchange. 2. _tick now checks the return value of executor.open_position(). If it returns None (entry failed), the OPEN alert is skipped rather than broadcasting a misleading success message. 3. Backtest CSV export generates per-symbol filenames (trades-ETH.csv, trades-BTC.csv) when backtesting multiple symbols, preventing later symbols from overwriting earlier results. Co-Authored-By: Claude Opus 4.6 (1M context) --- main.py | 72 ++++++++++--- tests/test_main.py | 257 +++++++++++++++++++++++++++++++++++++++++++++ 2 files changed, 316 insertions(+), 13 deletions(-) create mode 100644 tests/test_main.py diff --git a/main.py b/main.py index 1ae05fd..9f1f3f9 100644 --- a/main.py +++ b/main.py @@ -7,6 +7,7 @@ import logging import sys import time +from pathlib import Path import pandas as pd @@ -68,12 +69,8 @@ def run_trading_loop(config_path: str | None = None) -> None: ingestor = DataIngestor(config, db, client) signal_engine = SignalEngine(config.signals) risk_manager = RiskManager(config, db) - - # Auto-stop after 3 consecutive losing weeks (§5.3) - if not _check_losing_weeks(db) and not any(a == "--force" for a in sys.argv): - logger.error("Use --force to override the losing weeks halt") - _alert(config, "BOT HALTED: 3 consecutive losing weeks") - sys.exit(1) + forced = any(a == "--force" for a in sys.argv) + entries_halted = False # Mode-specific setup executor: Executor @@ -96,6 +93,26 @@ def run_trading_loop(config_path: str | None = None) -> None: executor = PaperExecutor(db) logger.info("Paper mode — no real orders") + # Auto-stop after 3 consecutive 
losing weeks (§5.3) + if not _check_losing_weeks(db) and not forced: + open_trade_count = len(db.get_open_trades()) + if config.mode == "live" and open_trade_count > 0: + entries_halted = True + logger.warning( + "Losing weeks halt active — managing %d existing live position(s)," + " new entries disabled", + open_trade_count, + ) + _alert( + config, + "BOT HALTED: 3 consecutive losing weeks — managing existing" + " live positions only", + ) + else: + logger.error("Use --force to override the losing weeks halt") + _alert(config, "BOT HALTED: 3 consecutive losing weeks") + sys.exit(1) + # WebSocket for real-time prices (both modes benefit) ws_client = WsClient() ws_client.subscribe_mid_prices(config.trading.symbols) @@ -156,7 +173,7 @@ def run_trading_loop(config_path: str | None = None) -> None: config, db, client, ingestor, signal_engine, risk_manager, executor, tf, min_candles, prediction_clients, last_prediction_poll_ms, - ws_client, daemon_state, + ws_client, daemon_state, entries_halted, ) # Update daemon state after tick @@ -278,7 +295,7 @@ def _tick( config, db, client, ingestor, signal_engine, risk_manager, executor, tf, min_candles, prediction_clients=None, last_prediction_poll_ms=0, - ws_client=None, daemon_state=None, + ws_client=None, daemon_state=None, entries_halted=False, ) -> tuple[int, str]: """Single iteration of the trading loop. 
Returns (last_prediction_poll_ms, regime_label).""" prediction_clients = prediction_clients or {} @@ -324,7 +341,7 @@ def _tick( # Capital-based stop loss if risk_manager.check_stop_loss( - trade["entry_price"], current_price, trade["side"] + trade["entry_price"], current_price, trade["side"], trade["size_usd"], ): pnl = _compute_pnl(trade, current_price) executor.close_position( @@ -371,6 +388,11 @@ def _tick( _alert(config, f"CLOSE {symbol} {result.reason} pnl={pnl:.2f}") elif result.signal in (Signal.LONG, Signal.SHORT) and not open_trades: + if entries_halted: + logger.info( + "Entry blocked by losing weeks halt for %s", symbol, + ) + continue risk_check = risk_manager.check_entry() if risk_check.allowed: size = risk_manager.compute_position_size(prediction_regime) @@ -381,7 +403,15 @@ def _tick( ) continue price = _get_price(ws_client, client, symbol) - executor.open_position(symbol, result.signal.value, size, price) + trade_id = executor.open_position( + symbol, result.signal.value, size, price, + ) + if trade_id is None: + logger.error( + "Entry failed for %s %s — skipping OPEN alert", + result.signal.value, symbol, + ) + continue regime_tag = "" if prediction_regime != PredictionRegime.NORMAL: regime_tag = f" [{prediction_regime.value}]" @@ -582,8 +612,11 @@ def run_backtest(config_path: str | None = None) -> None: result = engine.run(db, symbol) print(result.summary()) if bt_config.export_trades_csv: - result.trades_to_csv(bt_config.export_trades_csv) - logger.info("Trades exported to %s", bt_config.export_trades_csv) + export_path = _trade_export_path( + bt_config.export_trades_csv, symbol, config.trading.symbols, + ) + result.trades_to_csv(export_path) + logger.info("Trades exported to %s", export_path) finally: db.close() @@ -685,7 +718,7 @@ def run_screen(config_path: str | None = None) -> None: # Hurst exponent from historical candles candles = db.get_candles( - symbol, config.data.primary_timeframe, limit=500, + symbol, config.data.primary_timeframe, 
limit=500, descending=True, ) if len(candles) < 50: continue @@ -734,6 +767,19 @@ def run_compare( db.close() +def _trade_export_path( + raw_path: str, symbol: str, all_symbols: list[str], +) -> str: + """Return a per-symbol export path when backtesting multiple symbols.""" + if len(all_symbols) == 1: + return raw_path + + path = Path(raw_path) + if path.suffix: + return str(path.with_name(f"{path.stem}-{symbol}{path.suffix}")) + return str(path.with_name(f"{path.name}-{symbol}")) + + def run_review(config_path: str | None = None, weeks: int = 1) -> None: """Print a weekly performance review.""" from perp_bot.reporting.weekly import generate_weekly_report diff --git a/tests/test_main.py b/tests/test_main.py new file mode 100644 index 0000000..335e5b7 --- /dev/null +++ b/tests/test_main.py @@ -0,0 +1,257 @@ +"""Tests for main CLI/runtime flows.""" + +from __future__ import annotations + +from pathlib import Path +from types import SimpleNamespace + +import main +from perp_bot.config import ( + BacktestConfig, + BotConfig, + DataConfig, + ExecutionConfig, + RiskConfig, + SignalConfig, + TradingConfig, +) +from perp_bot.signals.engine import Signal + + +def _make_config( + *, + mode: str = "paper", + symbols: list[str] | None = None, + export_trades_csv: str | None = None, +) -> BotConfig: + return BotConfig( + trading=TradingConfig( + symbols=symbols or ["ETH"], leverage=3, + capital_usd=670.0, margin_usage_limit=0.5, + ), + signals=SignalConfig( + zscore_lookback=20, zscore_entry_threshold=2.0, + zscore_exit_threshold=0.3, zscore_stop_threshold=3.0, + bollinger_period=20, bollinger_std=2.0, + rsi_period=14, rsi_overbought=70, rsi_oversold=30, + adx_period=14, adx_threshold=25, + ), + risk=RiskConfig( + max_loss_per_trade_pct=0.03, daily_loss_limit_pct=0.08, + max_positions=1, cooldown_seconds=1800, + position_timeout_hours=24, + ), + data=DataConfig( + timeframes=["15m"], primary_timeframe="15m", + history_days=90, db_path=":memory:", + ), + 
execution=ExecutionConfig( + order_type="limit", taker_fallback_seconds=30, + use_server_side_stop=True, + ), + mode=mode, + backtest=( + BacktestConfig(export_trades_csv=export_trades_csv) + if export_trades_csv + else None + ), + hl_private_key="0x" + "ab" * 32 if mode == "live" else "", + ) + + +def test_tick_skips_open_alert_when_entry_fails(monkeypatch): + alerts: list[str] = [] + + class FakeDb: + def get_candles(self, *_args, **_kwargs): + return [ + { + "open_time": i, + "open": 100.0, + "high": 101.0, + "low": 99.0, + "close": 100.0 + i, + "volume": 1000.0, + "num_trades": 10, + } + for i in range(3) + ] + + def get_open_trades(self, _symbol=None): + return [] + + class FakeIngestor: + def update_candles(self, _symbol): + pass + + class FakeSignalEngine: + def compute_indicators(self, df): + return df + + def evaluate(self, *_args, **_kwargs): + return SimpleNamespace( + signal=Signal.LONG, + reason="entry", + zscore_value=2.5, + rsi_value=75.0, + adx_value=10.0, + price=100.0, + ) + + class FakeRiskManager: + def check_entry(self): + return SimpleNamespace(allowed=True, reason="ok") + + def compute_position_size(self, *_args, **_kwargs): + return 1_000.0 + + class FakeExecutor: + _sl_failed = False + + def open_position(self, *_args, **_kwargs): + return None + + monkeypatch.setattr(main, "_get_price", lambda *_args, **_kwargs: 100.0) + monkeypatch.setattr(main, "_alert", lambda _config, message: alerts.append(message)) + + last_prediction_poll_ms, regime = main._tick( + _make_config(), + FakeDb(), + client=object(), + ingestor=FakeIngestor(), + signal_engine=FakeSignalEngine(), + risk_manager=FakeRiskManager(), + executor=FakeExecutor(), + tf="15m", + min_candles=2, + ) + + assert alerts == [] + assert last_prediction_poll_ms == 0 + assert regime == "normal" + + +def test_run_backtest_exports_distinct_files_per_symbol(monkeypatch): + exported_paths: list[str] = [] + + class FakeResult: + def summary(self): + return "summary" + + def trades_to_csv(self, 
path): + exported_paths.append(path) + + class FakeEngine: + def __init__(self, *_args, **_kwargs): + pass + + def run(self, _db, _symbol): + return FakeResult() + + class FakeDb: + def __init__(self, _path): + pass + + def close(self): + pass + + monkeypatch.setattr( + main, + "load_config", + lambda _path: _make_config( + symbols=["ETH", "BTC"], export_trades_csv="trades.csv", + ), + ) + monkeypatch.setattr(main, "BacktestEngine", FakeEngine) + monkeypatch.setattr(main, "Database", FakeDb) + + main.run_backtest() + + assert exported_paths == ["trades-ETH.csv", "trades-BTC.csv"] + + +def test_run_trading_loop_reconciles_before_losing_weeks_halt(monkeypatch): + call_order: list[str] = [] + + class FakeDb: + last_instance = None + + def __init__(self, _path): + self.open_trades = [] + FakeDb.last_instance = self + + def get_open_trades(self, _symbol=None): + return list(self.open_trades) + + def close(self): + pass + + class FakeExecutor: + def __init__(self, _config, _db): + pass + + def set_leverage(self, _symbol, _leverage): + return True + + class FakeWsClient: + def subscribe_mid_prices(self, _symbols): + pass + + def is_healthy(self): + return True + + def close(self): + pass + + class FakeHealthChecker: + def __init__(self, *_args, **_kwargs): + pass + + def tick(self, _label): + pass + + class FakeStateServer: + def __init__(self, *_args, **_kwargs): + pass + + def start(self): + pass + + def stop(self): + pass + + def fake_reconcile(_executor, db, _config): + call_order.append("reconcile") + db.open_trades.append({"id": 1, "symbol": "ETH", "side": "long"}) + + def fake_check_losing_weeks(_db): + call_order.append("check") + return False + + def fake_tick(*args, **_kwargs): + call_order.append("tick") + assert args[-1] is True + raise KeyboardInterrupt + + monkeypatch.setattr(main, "load_config", lambda _path: _make_config(mode="live")) + monkeypatch.setattr(main, "Database", FakeDb) + monkeypatch.setattr(main, "HyperliquidClient", lambda: object()) + 
monkeypatch.setattr(main, "DataIngestor", lambda *_args, **_kwargs: object()) + monkeypatch.setattr(main, "SignalEngine", lambda *_args, **_kwargs: object()) + monkeypatch.setattr(main, "RiskManager", lambda *_args, **_kwargs: object()) + monkeypatch.setattr(main, "LiveExecutor", FakeExecutor) + monkeypatch.setattr(main, "WsClient", FakeWsClient) + monkeypatch.setattr(main, "HealthChecker", FakeHealthChecker) + monkeypatch.setattr(main, "DaemonStateServer", FakeStateServer) + monkeypatch.setattr(main, "setup_logging", lambda *args, **kwargs: None) + monkeypatch.setattr(main, "_init_prediction_clients", lambda _config: {}) + monkeypatch.setattr(main, "_reconcile_positions", fake_reconcile) + monkeypatch.setattr(main, "_check_losing_weeks", fake_check_losing_weeks) + monkeypatch.setattr(main, "_tick", fake_tick) + monkeypatch.setattr(main, "_alert", lambda *_args, **_kwargs: None) + monkeypatch.setattr(main, "get_socket_path", lambda _db_path: Path("/tmp/perp-bot.sock")) + monkeypatch.setattr(main.sys, "argv", ["main.py", "trade"]) + + main.run_trading_loop() + + assert call_order == ["reconcile", "check", "tick"] From cedb2f3e606aa513c3e4ec4fa4785c6923820fe0 Mon Sep 17 00:00:00 2001 From: morfize <233522679+morfize@users.noreply.github.com> Date: Thu, 19 Mar 2026 03:37:42 -0700 Subject: [PATCH 6/8] refactor: migrate CLI from script to installable entry point Move all CLI logic from root main.py into src/perp_bot/cli.py and register a console_scripts entry point (`perpbot`) in pyproject.toml. main.py becomes a thin compatibility shim. The --force flag is now passed as a function parameter instead of scanning sys.argv. Also adds: README with install/usage docs, MIT license, changelog, GitHub Actions CI/release workflows, pyproject.toml metadata (classifiers, URLs, sdist excludes), config.py CWD-relative path resolution, and tests for the new CLI parameter passing. 
Co-Authored-By: Claude Opus 4.6 (1M context) --- .claude/settings.local.json | 19 + .github/workflows/ci.yml | 55 +++ .github/workflows/release.yml | 30 ++ CHANGELOG.md | 22 + CLAUDE.md | 22 +- LICENSE | 21 + README.md | 77 ++++ deploy/perp-bot.service | 2 +- main.py | 835 +-------------------------------- pyproject.toml | 37 ++ src/perp_bot/cli.py | 837 ++++++++++++++++++++++++++++++++++ src/perp_bot/config.py | 13 +- tests/test_config.py | 70 +++ tests/test_main.py | 20 +- 14 files changed, 1207 insertions(+), 853 deletions(-) create mode 100644 .claude/settings.local.json create mode 100644 .github/workflows/ci.yml create mode 100644 .github/workflows/release.yml create mode 100644 CHANGELOG.md create mode 100644 LICENSE create mode 100644 src/perp_bot/cli.py create mode 100644 tests/test_config.py diff --git a/.claude/settings.local.json b/.claude/settings.local.json new file mode 100644 index 0000000..7d4dbba --- /dev/null +++ b/.claude/settings.local.json @@ -0,0 +1,19 @@ +{ + "permissions": { + "allow": [ + "Bash(find /Users/cotoneum/project/perp-bot/src -type f -name *.py)", + "Bash(find /Users/cotoneum/project/perp-bot/tests -type f -name *.py)", + "Bash(grep -r \"websocket_manager\\\\|WebSocket\\\\|ws://\" /Users/cotoneum/project/perp-bot/src --include=*.py)", + "Bash(/tmp/tree_output.txt:*)", + "Read(//tmp/**)", + "Bash(gh repo:*)", + "Bash(git rm:*)", + "Bash(git rebase:*)" + ] + }, + "outputStyle": "Explanatory", + "sandbox": { + "enabled": true, + "autoAllowBashIfSandboxed": true + } +} diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml new file mode 100644 index 0000000..4f5acf9 --- /dev/null +++ b/.github/workflows/ci.yml @@ -0,0 +1,55 @@ +name: CI + +on: + push: + branches: + - main + pull_request: + +jobs: + test: + runs-on: ubuntu-latest + strategy: + fail-fast: false + matrix: + python-version: ["3.12", "3.13"] + + steps: + - name: Check out repository + uses: actions/checkout@v4 + + - name: Set up uv + uses: astral-sh/setup-uv@v5 
+ + - name: Set up Python + uses: actions/setup-python@v5 + with: + python-version: ${{ matrix.python-version }} + + - name: Install dependencies + run: uv sync --frozen --group dev + + - name: Lint + run: uv run ruff check src tests + + - name: Run tests + run: uv run pytest + + build: + runs-on: ubuntu-latest + + steps: + - name: Check out repository + uses: actions/checkout@v4 + + - name: Set up uv + uses: astral-sh/setup-uv@v5 + + - name: Build distributions + run: uv build + + - name: Upload build artifacts + uses: actions/upload-artifact@v4 + with: + name: dist + path: dist/* diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml new file mode 100644 index 0000000..0b41523 --- /dev/null +++ b/.github/workflows/release.yml @@ -0,0 +1,30 @@ +name: Release + +on: + push: + tags: + - "v*" + workflow_dispatch: + +permissions: + contents: write + +jobs: + release: + runs-on: ubuntu-latest + + steps: + - name: Check out repository + uses: actions/checkout@v4 + + - name: Set up uv + uses: astral-sh/setup-uv@v5 + + - name: Build distributions + run: uv build + + - name: Publish GitHub release + uses: softprops/action-gh-release@v2 + with: + files: dist/* + generate_release_notes: true diff --git a/CHANGELOG.md b/CHANGELOG.md new file mode 100644 index 0000000..c20170d --- /dev/null +++ b/CHANGELOG.md @@ -0,0 +1,22 @@ +# Changelog + +All notable changes to this project will be documented in this file. + +The format is based on Keep a Changelog, and this project follows Semantic Versioning. + +## [0.1.0] - 2026-03-19 + +### Added + +- Packaged CLI entry point as `perpbot` with GitHub-installable build artifacts. +- Config loading that works from the current working directory or an explicit `--config` path. +- Public package metadata, README install instructions, and release automation scaffolding. + +### Fixed + +- Losing-weeks halt now reconciles live positions before halting new entries. 
+- Live stop-loss now uses actual trade notional instead of recomputed regime size. +- Paper-vs-backtest reporting now excludes live trades. +- Symbol screening now uses the latest candle window. +- Live `OPEN` alerts are only emitted after a successful entry. +- Multi-symbol backtest CSV exports no longer overwrite each other. diff --git a/CLAUDE.md b/CLAUDE.md index 77152eb..44d7744 100644 --- a/CLAUDE.md +++ b/CLAUDE.md @@ -61,17 +61,17 @@ uv sync # Install dependencies uv run pytest tests/ -v # Run all tests uv run pytest tests/test_indicators.py -v # Run a single test file uv run ruff check src/ tests/ # Lint -uv run python main.py backfill # Backfill historical data from Hyperliquid -uv run python main.py trade # Start trading loop (paper mode by default, set mode: "live" in config.yaml for real orders) -uv run python main.py backfill-predictions # Fetch current prediction market snapshots -uv run python main.py backtest # Run backtest over historical data -uv run python main.py walkforward # Walk-forward overfitting analysis -uv run python main.py sensitivity # Parameter sensitivity sweep -uv run python main.py screen # Screen symbols by Hurst exponent for mean-reversion fit -uv run python main.py review --weeks 1 # Weekly performance report -uv run python main.py compare --days 7 # Compare paper trades vs backtest over same period -uv run python main.py tui # Launch TUI dashboard (attach to running daemon) -uv run python main.py status # One-shot daemon state query (JSON output) +perpbot backfill # Backfill historical data from Hyperliquid +perpbot trade # Start trading loop (paper mode by default, set mode: "live" in config.yaml for real orders) +perpbot backfill-predictions # Fetch current prediction market snapshots +perpbot backtest # Run backtest over historical data +perpbot walkforward # Walk-forward overfitting analysis +perpbot sensitivity # Parameter sensitivity sweep +perpbot screen # Screen symbols by Hurst exponent for mean-reversion fit 
+perpbot review --weeks 1 # Weekly performance report +perpbot compare --days 7 # Compare paper trades vs backtest over same period +perpbot tui # Launch TUI dashboard (attach to running daemon) +perpbot status # One-shot daemon state query (JSON output) ``` ## Conventions diff --git a/LICENSE b/LICENSE new file mode 100644 index 0000000..3f592b0 --- /dev/null +++ b/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2026 morfize + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/README.md b/README.md index e69de29..120271a 100644 --- a/README.md +++ b/README.md @@ -0,0 +1,77 @@ +# perpbot + +`perpbot` is a Python CLI for running a Hyperliquid mean-reversion perpetual futures bot, backfilling market data, backtesting, screening symbols, and monitoring a running daemon. 
+ +## Release status + +- Python package with console entry point: `perpbot` +- GitHub Actions CI for lint, tests, and build validation +- GitHub tag-based release workflow that uploads wheel and source distribution artifacts + +## Install + +Local development install: + +```bash +uv sync +uv run perpbot --help +``` + +Install directly from GitHub: + +```bash +uv tool install git+https://github.com/morfize/perp-bot.git +perpbot --help +``` + +Or with `pip`: + +```bash +pip install "git+https://github.com/morfize/perp-bot.git" +perpbot --help +``` + +Install a tagged release: + +```bash +uv tool install git+https://github.com/morfize/perp-bot.git@v0.1.0 +``` + +## Configuration + +By default, `perpbot` looks for these files in your current working directory: + +- `config.yaml` +- `.env` + +You can also point the CLI at a specific config file: + +```bash +perpbot trade --config /path/to/config.yaml +``` + +When `--config` is provided, `perpbot` also loads `.env` from that config file's directory. 
+ +The repository includes: + +- `config.yaml` as a starting config +- `.env.example` as an environment variable template +- `CHANGELOG.md` for release notes + +## CLI usage + +```bash +perpbot --help +perpbot trade +perpbot trade --force +perpbot backfill +perpbot backfill-predictions +perpbot backtest +perpbot walkforward +perpbot sensitivity +perpbot screen +perpbot review --weeks 1 +perpbot compare --days 7 +perpbot tui +perpbot status +``` diff --git a/deploy/perp-bot.service b/deploy/perp-bot.service index 0d4c6e7..1967b2f 100644 --- a/deploy/perp-bot.service +++ b/deploy/perp-bot.service @@ -9,7 +9,7 @@ User=perp-bot Group=perp-bot WorkingDirectory=/opt/perp-bot EnvironmentFile=/opt/perp-bot/.env -ExecStart=/opt/perp-bot/.venv/bin/python main.py trade +ExecStart=/opt/perp-bot/.venv/bin/perpbot trade Restart=always RestartSec=10 WatchdogSec=300 diff --git a/main.py b/main.py index 9f1f3f9..1268345 100644 --- a/main.py +++ b/main.py @@ -1,837 +1,6 @@ -"""Main entry point — data backfill and trading loop.""" +"""Compatibility shim for direct ``python main.py ...`` usage.""" -from __future__ import annotations - -import argparse -import json -import logging -import sys -import time -from pathlib import Path - -import pandas as pd - -from perp_bot.backtest.config import BacktestConfig -from perp_bot.backtest.engine import BacktestEngine -from perp_bot.backtest.sensitivity import ParameterSensitivityAnalyzer -from perp_bot.backtest.walk_forward import WalkForwardRunner -from perp_bot.config import load_config -from perp_bot.data.client import HyperliquidClient, INTERVAL_MS -from perp_bot.data.db import Database -from perp_bot.data.ingest import DataIngestor -from perp_bot.data.prediction_client import KalshiClient, PolymarketClient -from perp_bot.data.ws_client import WsClient -from perp_bot.execution.executor import Executor, PaperExecutor -from perp_bot.execution.live_executor import LiveExecutor -from perp_bot.infra.alerts import send_discord_alert, 
send_telegram_alert -from perp_bot.infra.health import HealthChecker -from perp_bot.infra.logging import setup_logging -from perp_bot.ipc.protocol import get_socket_path -from perp_bot.ipc.server import DaemonStateServer -from perp_bot.ipc.state import DaemonState -from perp_bot.risk.manager import RiskManager -from perp_bot.signals.engine import Signal, SignalEngine -from perp_bot.signals.prediction import ( - PredictionRegime, - compute_regime, - funding_side_preference, - rate_change_score, - war_risk_score, -) - -logger = logging.getLogger(__name__) - - -def run_backfill(config_path: str | None = None) -> None: - """One-shot: backfill historical data into SQLite.""" - config = load_config(config_path) - db = Database(config.data.db_path) - client = HyperliquidClient() - ingestor = DataIngestor(config, db, client) - - try: - ingestor.run_full_backfill() - logger.info("Backfill complete") - finally: - db.close() - - -def run_trading_loop(config_path: str | None = None) -> None: - """Main trading loop — runs until interrupted. - - Supports 'paper' and 'live' modes. In live mode, uses LiveExecutor - for real order placement and WebSocket for real-time price feeds. - Exposes runtime state via Unix socket for TUI attachment. 
- """ - config = load_config(config_path) - db = Database(config.data.db_path) - client = HyperliquidClient() - ingestor = DataIngestor(config, db, client) - signal_engine = SignalEngine(config.signals) - risk_manager = RiskManager(config, db) - forced = any(a == "--force" for a in sys.argv) - entries_halted = False - - # Mode-specific setup - executor: Executor - ws_client: WsClient | None = None - - if config.mode == "live": - if not config.hl_private_key: - logger.error("HL_PRIVATE_KEY required for live mode") - sys.exit(1) - executor = LiveExecutor(config, db) - # Set leverage — refuse to start if it fails - for sym in config.trading.symbols: - if not executor.set_leverage(sym, config.trading.leverage): - logger.error("Failed to set leverage for %s — aborting live mode", sym) - sys.exit(1) - # Reconcile exchange positions against DB - _reconcile_positions(executor, db, config) - logger.info("Live mode — real orders will be placed") - else: - executor = PaperExecutor(db) - logger.info("Paper mode — no real orders") - - # Auto-stop after 3 consecutive losing weeks (§5.3) - if not _check_losing_weeks(db) and not forced: - open_trade_count = len(db.get_open_trades()) - if config.mode == "live" and open_trade_count > 0: - entries_halted = True - logger.warning( - "Losing weeks halt active — managing %d existing live position(s)," - " new entries disabled", - open_trade_count, - ) - _alert( - config, - "BOT HALTED: 3 consecutive losing weeks — managing existing" - " live positions only", - ) - else: - logger.error("Use --force to override the losing weeks halt") - _alert(config, "BOT HALTED: 3 consecutive losing weeks") - sys.exit(1) - - # WebSocket for real-time prices (both modes benefit) - ws_client = WsClient() - ws_client.subscribe_mid_prices(config.trading.symbols) - - # Health checker for periodic heartbeat - health_checker = HealthChecker(config, db, ws_client, executor) - - # Prediction market clients - prediction_clients = _init_prediction_clients(config) - 
- # IPC: daemon state + socket server for TUI attachment - daemon_state = DaemonState(mode=config.mode) - socket_path = get_socket_path(config.data.db_path) - state_server = DaemonStateServer(socket_path, daemon_state, executor, db) - state_server.start() - - # Set up file logging for TUI log tailing - log_file = str(socket_path.parent / "perp-bot.log") - setup_logging(log_file=log_file) - - tf = config.data.primary_timeframe - interval_ms = INTERVAL_MS[tf] - min_candles = max( - config.signals.zscore_lookback, - config.signals.bollinger_period, - config.signals.rsi_period, - config.signals.adx_period * 2, # ADX needs more warmup - ) - - logger.info("Starting trading loop — mode=%s, timeframe=%s", config.mode, tf) - if config.discord_webhook_url: - send_discord_alert( - config.discord_webhook_url, - f"Bot started — mode={config.mode}", - ) - - last_prediction_poll_ms = 0 - prediction_regime_label = "NORMAL" - - try: - while True: - # WebSocket health check — reconnect if stale - if ws_client and not ws_client.is_healthy(): - logger.warning("WebSocket stale — triggering reconnect") - _alert(config, "WS RECONNECT: price feed was stale") - ws_client.reconnect() - - # Check pause state — skip tick but still monitor health - if daemon_state.paused: - daemon_state.update( - ws_healthy=ws_client.is_healthy() if ws_client else False, - ) - health_checker.tick(prediction_regime_label) - _sleep_until_next_candle(interval_ms) - continue - - last_prediction_poll_ms, prediction_regime_label = _tick( - config, db, client, ingestor, signal_engine, - risk_manager, executor, tf, min_candles, - prediction_clients, last_prediction_poll_ms, - ws_client, daemon_state, entries_halted, - ) - - # Update daemon state after tick - _update_daemon_state( - daemon_state, config, ws_client, risk_manager, - executor, prediction_regime_label, db, - ) - - health_checker.tick(prediction_regime_label) - _sleep_until_next_candle(interval_ms) - except KeyboardInterrupt: - logger.info("Shutting down") 
- finally: - state_server.stop() - if ws_client: - ws_client.close() - db.close() - - -def _reconcile_positions( - executor: LiveExecutor, db: Database, config, -) -> None: - """Reconcile exchange positions against DB on startup. - - - Exchange has position but DB doesn't → create DB record from exchange state - - DB has open position but exchange doesn't → close DB record as reconciled - """ - for symbol in config.trading.symbols: - exchange_pos = executor.get_exchange_position(symbol) - db_trades = db.get_open_trades(symbol) - - if exchange_pos and not db_trades: - # Exchange has a position we don't know about - now = int(time.time() * 1000) - trade_id = db.insert_trade({ - "symbol": symbol, - "side": exchange_pos["side"], - "entry_time": now, - "entry_price": exchange_pos["entry_price"], - "size_usd": exchange_pos["entry_price"] * exchange_pos["size_base"], - "is_paper": 0, - }) - logger.warning( - "RECONCILED: Found exchange position %s %s (%.4f @ %.2f) " - "not in DB — created trade #%d", - exchange_pos["side"], symbol, - exchange_pos["size_base"], exchange_pos["entry_price"], - trade_id, - ) - _alert( - config, - f"RECONCILE: Adopted orphan {exchange_pos['side']} " - f"{symbol} @ {exchange_pos['entry_price']:.2f}", - ) - - elif db_trades and not exchange_pos: - # DB thinks we have a position but exchange doesn't - for trade in db_trades: - now = int(time.time() * 1000) - db.close_trade( - trade["id"], now, trade["entry_price"], 0.0, - "reconciled_missing", - ) - logger.warning( - "RECONCILED: DB trade #%d (%s %s) has no exchange position — " - "closed as reconciled_missing", - trade["id"], trade["side"], symbol, - ) - _alert( - config, - f"RECONCILE: Closed {len(db_trades)} orphan DB trade(s) for {symbol}", - ) - - elif exchange_pos and db_trades: - logger.info( - "Position sync OK: %s %s matches DB trade #%d", - exchange_pos["side"], symbol, db_trades[0]["id"], - ) - - -def _check_losing_weeks(db: Database, num_weeks: int = 3) -> bool: - """Check if the last 
N weeks were all net-negative. Returns True if safe to start.""" - now_ms = int(time.time() * 1000) - week_ms = 7 * 24 * 3600 * 1000 - - losing_count = 0 - for i in range(num_weeks): - end = now_ms - i * week_ms - start = end - week_ms - trades = db.get_closed_trades_in_range(start, end) - if not trades: - return True # Not enough history — safe to start - weekly_pnl = sum(t.get("pnl", 0) or 0 for t in trades) - if weekly_pnl < 0: - losing_count += 1 - - if losing_count >= num_weeks: - logger.error( - "HALTED: %d consecutive losing weeks detected", num_weeks, - ) - return False - return True - - -def _init_prediction_clients(config) -> dict: - """Create prediction market client instances based on configured sources.""" - clients: dict = {} - if not config.prediction or not config.prediction.enabled: - return clients - sources = {m.source for m in config.prediction.markets} - if "polymarket" in sources: - clients["polymarket"] = PolymarketClient() - if "kalshi" in sources: - clients["kalshi"] = KalshiClient() - return clients - - -def _tick( - config, db, client, ingestor, signal_engine, - risk_manager, executor, tf, min_candles, - prediction_clients=None, last_prediction_poll_ms=0, - ws_client=None, daemon_state=None, entries_halted=False, -) -> tuple[int, str]: - """Single iteration of the trading loop. 
Returns (last_prediction_poll_ms, regime_label).""" - prediction_clients = prediction_clients or {} - - # --- Prediction market polling --- - prediction_regime = PredictionRegime.NORMAL - preferred_side = None - now_ms = int(time.time() * 1000) - - if config.prediction and config.prediction.enabled and prediction_clients: - poll_interval_ms = config.prediction.poll_interval_minutes * 60_000 - if now_ms - last_prediction_poll_ms >= poll_interval_ms: - last_prediction_poll_ms = now_ms - ingestor.update_predictions(prediction_clients, config.prediction) - - # Compute regime from latest cached data - prediction_regime, preferred_side = _compute_prediction_state(db, config) - - for symbol in config.trading.symbols: - # Update candles - ingestor.update_candles(symbol) - - # Load candles into DataFrame - candles = db.get_candles(symbol, tf, limit=min_candles + 50) - if len(candles) < min_candles: - logger.warning( - "Not enough candles for %s %s (%d < %d)", - symbol, tf, len(candles), min_candles, - ) - continue - - df = pd.DataFrame(candles) - df = signal_engine.compute_indicators(df) - - # Current position state - open_trades = db.get_open_trades(symbol) - position_side = open_trades[0]["side"] if open_trades else None - - # Check position-level exits first - if open_trades: - trade = open_trades[0] - current_price = _get_price(ws_client, client, symbol) - - # Capital-based stop loss - if risk_manager.check_stop_loss( - trade["entry_price"], current_price, trade["side"], trade["size_usd"], - ): - pnl = _compute_pnl(trade, current_price) - executor.close_position( - trade["id"], symbol, current_price, pnl, - "capital_stop_loss", - ) - risk_manager.record_stop_loss() - _alert(config, f"STOP LOSS {symbol} pnl={pnl:.2f}") - continue - - # Position timeout - if risk_manager.check_position_timeout(trade["entry_time"]): - pnl = _compute_pnl(trade, current_price) - executor.close_position( - trade["id"], symbol, current_price, pnl, - "timeout_24h", - ) - _alert(config, 
f"TIMEOUT {symbol} pnl={pnl:.2f}") - continue - - # Signal evaluation with prediction regime - result = signal_engine.evaluate( - df, position_side, prediction_regime, preferred_side, - ) - - # Expose signal to daemon state for TUI - if daemon_state is not None: - daemon_state.latest_signals[symbol] = { - "signal": result.signal.value, - "reason": result.reason, - "zscore": result.zscore_value, - "rsi": result.rsi_value, - "adx": result.adx_value, - "price": result.price, - } - - if result.signal == Signal.CLOSE and open_trades: - trade = open_trades[0] - current_price = _get_price(ws_client, client, symbol) - pnl = _compute_pnl(trade, current_price) - executor.close_position( - trade["id"], symbol, current_price, pnl, result.reason, - ) - _alert(config, f"CLOSE {symbol} {result.reason} pnl={pnl:.2f}") - - elif result.signal in (Signal.LONG, Signal.SHORT) and not open_trades: - if entries_halted: - logger.info( - "Entry blocked by losing weeks halt for %s", symbol, - ) - continue - risk_check = risk_manager.check_entry() - if risk_check.allowed: - size = risk_manager.compute_position_size(prediction_regime) - if size <= 0: - logger.info( - "Position size zero — regime=%s", - prediction_regime.value, - ) - continue - price = _get_price(ws_client, client, symbol) - trade_id = executor.open_position( - symbol, result.signal.value, size, price, - ) - if trade_id is None: - logger.error( - "Entry failed for %s %s — skipping OPEN alert", - result.signal.value, symbol, - ) - continue - regime_tag = "" - if prediction_regime != PredictionRegime.NORMAL: - regime_tag = f" [{prediction_regime.value}]" - _alert( - config, - f"OPEN {result.signal.value} {symbol}" - f" @ {price:.2f} size=${size:.0f}" - f"{regime_tag}", - ) - # Alert if server-side SL placement failed - if ( - isinstance(executor, LiveExecutor) - and executor._sl_failed - ): - _alert( - config, - f"CRITICAL: {symbol} UNHEDGED — " - f"server-side SL failed!", - ) - else: - logger.info("Entry blocked: %s", 
risk_check.reason) - - return last_prediction_poll_ms, prediction_regime.value - - -def _get_price(ws_client, rest_client, symbol: str) -> float: - """Get mid price from WebSocket cache, falling back to REST.""" - if ws_client is not None: - ws_price = ws_client.get_mid_price(symbol) - if ws_price is not None: - return ws_price - return rest_client.get_mid_price(symbol) - - -def _compute_prediction_state(db, config): - """Compute prediction regime and preferred side from latest DB snapshots.""" - snapshots = db.get_latest_predictions() - if not snapshots: - return PredictionRegime.NORMAL, None - - war_snapshots = [s for s in snapshots if s["category"] == "war_risk"] - rate_snapshots = [s for s in snapshots if s["category"] == "rate_change"] - - market_weights = { - m.slug: m.weight - for m in config.prediction.markets - if m.category == "war_risk" - } - w_risk = war_risk_score(war_snapshots, market_weights) - r_change = rate_change_score(rate_snapshots) - - regime = compute_regime(w_risk, r_change, config.prediction) - preferred = funding_side_preference(r_change, config.prediction.rate_change_threshold) - - if regime != PredictionRegime.NORMAL: - logger.info( - "Prediction regime=%s war_risk=%.3f rate_change=%.3f preferred=%s", - regime.value, w_risk, r_change, preferred, - ) - - return regime, preferred - - -def _compute_pnl(trade: dict, current_price: float) -> float: - """Compute unrealised P&L for a trade.""" - if trade["side"] == "long": - return (current_price - trade["entry_price"]) / trade["entry_price"] * trade["size_usd"] - else: - return (trade["entry_price"] - current_price) / trade["entry_price"] * trade["size_usd"] - - -def _alert(config, message: str) -> None: - logger.info(message) - if config.discord_webhook_url: - send_discord_alert(config.discord_webhook_url, message) - if config.telegram_bot_token and config.telegram_chat_id: - send_telegram_alert( - config.telegram_bot_token, config.telegram_chat_id, message, - ) - - -def 
_sleep_until_next_candle(interval_ms: int) -> None: - """Sleep until the next candle boundary plus a small buffer.""" - now_ms = int(time.time() * 1000) - next_candle = ((now_ms // interval_ms) + 1) * interval_ms - sleep_secs = (next_candle - now_ms) / 1000 + 5 # 5s buffer for candle to finalise - logger.debug("Sleeping %.1fs until next candle", sleep_secs) - time.sleep(sleep_secs) - - -def _update_daemon_state( - daemon_state: DaemonState, - config, ws_client, risk_manager, executor, - prediction_regime_label: str, db, -) -> None: - """Refresh the daemon state after a tick for IPC clients.""" - now_ms = int(time.time() * 1000) - - # Mid prices - mid_prices = {} - if ws_client: - for sym in config.trading.symbols: - p = ws_client.get_mid_price(sym) - if p is not None: - mid_prices[sym] = p - - # Risk check - risk_check = risk_manager.check_entry() - - # Cooldown remaining - cooldown_s = 0.0 - if risk_manager._last_stop_loss_time is not None: - elapsed = now_ms - risk_manager._last_stop_loss_time - remaining = risk_manager.risk.cooldown_seconds * 1000 - elapsed - if remaining > 0: - cooldown_s = remaining / 1000 - - # Daily PnL - day_start = (int(time.time()) - int(time.time()) % 86400) * 1000 - daily_pnl = db.get_daily_pnl(day_start) - - # Slippage - slippage_stats = {"count": 0, "avg_pct": 0.0, "max_pct": 0.0} - if hasattr(executor, "get_slippage_stats"): - slippage_stats = executor.get_slippage_stats() - - daemon_state.update( - tick_count=daemon_state.tick_count + 1, - last_tick_ms=now_ms, - mid_prices=mid_prices, - ws_healthy=ws_client.is_healthy() if ws_client else False, - prediction_regime=prediction_regime_label, - risk_allowed=risk_check.allowed, - risk_reason=risk_check.reason, - cooldown_remaining_s=cooldown_s, - daily_pnl=daily_pnl, - slippage_stats=slippage_stats, - ) - - -def run_status(config_path: str | None = None) -> None: - """One-shot: connect to daemon, print formatted state, exit.""" - from perp_bot.ipc.client import DaemonClient - - config = 
load_config(config_path) - socket_path = get_socket_path(config.data.db_path) - client = DaemonClient(socket_path) - - if not client.is_running(): - print("Daemon is not running (socket not found or not responding)") - sys.exit(1) - - state = client.get_state() - if state is None: - print("Failed to get state from daemon") - sys.exit(1) - - print(json.dumps(state, indent=2)) - - -def run_tui(config_path: str | None = None) -> None: - """Launch the TUI dashboard connecting to a running daemon.""" - from perp_bot.tui.app import PerpBotApp - - config = load_config(config_path) - app = PerpBotApp(config) - app.run() - - -def run_backfill_predictions(config_path: str | None = None) -> None: - """One-shot: fetch current prediction market snapshots into SQLite.""" - config = load_config(config_path) - if not config.prediction or not config.prediction.enabled: - logger.error("Prediction markets not configured or disabled") - sys.exit(1) - - db = Database(config.data.db_path) - clients = _init_prediction_clients(config) - ingestor = DataIngestor(config, db, HyperliquidClient()) - - try: - count = ingestor.update_predictions(clients, config.prediction) - logger.info("Inserted %d prediction snapshots", count) - finally: - db.close() - - -def run_backtest(config_path: str | None = None) -> None: - """Run a backtest over all historical data.""" - config = load_config(config_path) - bt_config = config.backtest or BacktestConfig() - db = Database(config.data.db_path) - - try: - engine = BacktestEngine(config, bt_config) - for symbol in config.trading.symbols: - logger.info("Running backtest for %s", symbol) - result = engine.run(db, symbol) - print(result.summary()) - if bt_config.export_trades_csv: - export_path = _trade_export_path( - bt_config.export_trades_csv, symbol, config.trading.symbols, - ) - result.trades_to_csv(export_path) - logger.info("Trades exported to %s", export_path) - finally: - db.close() - - -def run_walkforward(config_path: str | None = None) -> None: - 
"""Run walk-forward analysis.""" - config = load_config(config_path) - bt_config = config.backtest or BacktestConfig() - db = Database(config.data.db_path) - - try: - for symbol in config.trading.symbols: - # Find data range - candles = db.get_candles(symbol, config.data.primary_timeframe, limit=1) - if not candles: - logger.warning("No data for %s", symbol) - continue - first_time = candles[0]["open_time"] - latest = db.get_latest_candle_time(symbol, config.data.primary_timeframe) - if latest is None: - continue - - runner = WalkForwardRunner(config, bt_config) - result = runner.run(db, symbol, first_time, latest) - print(result.summary()) - finally: - db.close() - - -def run_sensitivity(config_path: str | None = None) -> None: - """Run parameter sensitivity analysis.""" - config = load_config(config_path) - bt_config = config.backtest or BacktestConfig() - db = Database(config.data.db_path) - - try: - analyzer = ParameterSensitivityAnalyzer(config, bt_config) - for symbol in config.trading.symbols: - logger.info("Running sensitivity analysis for %s", symbol) - report = analyzer.run(db, symbol) - print(report.summary()) - finally: - db.close() - - -def run_screen(config_path: str | None = None) -> None: - """Screen symbols for mean-reversion suitability (§4.5). - - Criteria: Hurst < 0.5, 24h volume > $50M, tight spread. 
- """ - from perp_bot.signals.indicators import hurst_exponent - - config = load_config(config_path) - db = Database(config.data.db_path) - client = HyperliquidClient() - - try: - meta = client.get_asset_meta() - if not meta or len(meta) < 2: - print("Failed to fetch asset metadata") - return - - universe = meta[0].get("universe", []) - asset_ctxs = meta[1] if len(meta) > 1 else [] - - # Build volume + symbol lookup - vol_map: dict[str, float] = {} - for i, asset in enumerate(universe): - name = asset["name"] - if i < len(asset_ctxs): - ctx = asset_ctxs[i] - vol_map[name] = float(ctx.get("dayNtlVlm", 0)) - - symbols = [a["name"] for a in universe] - - header = ( - f"{'Symbol':<10} {'Vol24h($M)':>12} {'Spread%':>9}" - f" {'Hurst':>8} {'Verdict'}" - ) - print(header) - print("-" * len(header)) - - for symbol in symbols: - vol_24h = vol_map.get(symbol, 0) - - # §4.5: 24h volume > $50M - if vol_24h < 50_000_000: - continue - - # Bid-ask spread - try: - l2 = client.info.l2_snapshot(symbol) - bid = float(l2["levels"][0][0]["px"]) - ask = float(l2["levels"][1][0]["px"]) - mid = (bid + ask) / 2 - spread_pct = (ask - bid) / mid * 100 if mid > 0 else 999 - except Exception: - spread_pct = 999.0 - - # Hurst exponent from historical candles - candles = db.get_candles( - symbol, config.data.primary_timeframe, limit=500, descending=True, - ) - if len(candles) < 50: - continue - closes = pd.Series([c["close"] for c in candles]) - h = hurst_exponent(closes) - - # Verdict: must pass all §4.5 criteria - passes_hurst = h < 0.5 - passes_spread = spread_pct < 0.05 - if passes_hurst and passes_spread: - verdict = "CANDIDATE" - elif passes_hurst: - verdict = "MEAN-REV (wide spread)" - elif h > 0.55: - verdict = "TRENDING" - else: - verdict = "RANDOM" - - vol_m = vol_24h / 1_000_000 - print( - f"{symbol:<10} {vol_m:>12.1f} {spread_pct:>8.4f}%" - f" {h:>8.3f} {verdict}" - ) - finally: - db.close() - - -def run_compare( - config_path: str | None = None, days: int = 7, -) -> None: - 
"""Compare paper trading results against backtest for the same period.""" - from perp_bot.reporting.compare import compare_paper_vs_backtest - - config = load_config(config_path) - db = Database(config.data.db_path) - now_ms = int(time.time() * 1000) - start_ms = now_ms - days * 24 * 3600 * 1000 - - try: - for symbol in config.trading.symbols: - print(compare_paper_vs_backtest( - config, db, symbol, start_ms, now_ms, - )) - print() - finally: - db.close() - - -def _trade_export_path( - raw_path: str, symbol: str, all_symbols: list[str], -) -> str: - """Return a per-symbol export path when backtesting multiple symbols.""" - if len(all_symbols) == 1: - return raw_path - - path = Path(raw_path) - if path.suffix: - return str(path.with_name(f"{path.stem}-{symbol}{path.suffix}")) - return str(path.with_name(f"{path.name}-{symbol}")) - - -def run_review(config_path: str | None = None, weeks: int = 1) -> None: - """Print a weekly performance review.""" - from perp_bot.reporting.weekly import generate_weekly_report - - config = load_config(config_path) - db = Database(config.data.db_path) - try: - print(generate_weekly_report(db, weeks)) - finally: - db.close() - - -def main() -> None: - setup_logging() - parser = argparse.ArgumentParser(description="Hyperliquid mean-reversion bot") - parser.add_argument( - "command", - choices=[ - "backfill", "trade", "backfill-predictions", - "backtest", "walkforward", "sensitivity", - "screen", "review", "compare", "tui", "status", - ], - help="Command to run", - ) - parser.add_argument("--config", default=None, help="Path to config.yaml") - parser.add_argument("--weeks", type=int, default=1, help="Weeks for review report") - parser.add_argument("--days", type=int, default=7, help="Days for compare range") - parser.add_argument("--force", action="store_true", help="Override safety halts") - args = parser.parse_args() - - if args.command == "backfill": - run_backfill(args.config) - elif args.command == "trade": - 
run_trading_loop(args.config) - elif args.command == "backfill-predictions": - run_backfill_predictions(args.config) - elif args.command == "backtest": - run_backtest(args.config) - elif args.command == "walkforward": - run_walkforward(args.config) - elif args.command == "sensitivity": - run_sensitivity(args.config) - elif args.command == "screen": - run_screen(args.config) - elif args.command == "review": - run_review(args.config, args.weeks) - elif args.command == "compare": - run_compare(args.config, args.days) - elif args.command == "tui": - run_tui(args.config) - elif args.command == "status": - run_status(args.config) +from perp_bot.cli import main if __name__ == "__main__": diff --git a/pyproject.toml b/pyproject.toml index 80538ab..746bdd3 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -4,6 +4,22 @@ version = "0.1.0" description = "Hyperliquid mean-reversion perpetual futures trading bot" readme = "README.md" requires-python = ">=3.12" +license = { file = "LICENSE" } +authors = [ + { name = "morfize" }, +] +keywords = ["hyperliquid", "trading", "perpetuals", "crypto", "cli"] +classifiers = [ + "Development Status :: 3 - Alpha", + "Environment :: Console", + "Intended Audience :: Developers", + "Intended Audience :: Financial and Insurance Industry", + "License :: OSI Approved :: MIT License", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.12", + "Programming Language :: Python :: 3.13", + "Topic :: Office/Business :: Financial :: Investment", +] dependencies = [ "hyperliquid-python-sdk", "websockets", @@ -22,6 +38,15 @@ dev = [ "ruff", ] +[project.scripts] +perpbot = "perp_bot.cli:main" + +[project.urls] +Homepage = "https://github.com/morfize/perp-bot" +Repository = "https://github.com/morfize/perp-bot" +Issues = "https://github.com/morfize/perp-bot/issues" +Changelog = "https://github.com/morfize/perp-bot/blob/main/CHANGELOG.md" + [tool.ruff] line-length = 100 target-version = "py312" @@ -46,3 +71,15 @@ dev = [ 
[tool.hatch.build.targets.wheel] packages = ["src/perp_bot"] + +[tool.hatch.build.targets.sdist] +exclude = [ + "/.claude", + "/.pytest_cache", + "/.ruff_cache", + "/.venv", + "/__pycache__", + "/dist", + "/main.pyc", + "/scratch.py", +] diff --git a/src/perp_bot/cli.py b/src/perp_bot/cli.py new file mode 100644 index 0000000..232741a --- /dev/null +++ b/src/perp_bot/cli.py @@ -0,0 +1,837 @@ +"""Main entry point — data backfill and trading loop.""" + +from __future__ import annotations + +import argparse +import json +import logging +import sys +import time +from pathlib import Path + +import pandas as pd + +from perp_bot.backtest.config import BacktestConfig +from perp_bot.backtest.engine import BacktestEngine +from perp_bot.backtest.sensitivity import ParameterSensitivityAnalyzer +from perp_bot.backtest.walk_forward import WalkForwardRunner +from perp_bot.config import load_config +from perp_bot.data.client import INTERVAL_MS, HyperliquidClient +from perp_bot.data.db import Database +from perp_bot.data.ingest import DataIngestor +from perp_bot.data.prediction_client import KalshiClient, PolymarketClient +from perp_bot.data.ws_client import WsClient +from perp_bot.execution.executor import Executor, PaperExecutor +from perp_bot.execution.live_executor import LiveExecutor +from perp_bot.infra.alerts import send_discord_alert, send_telegram_alert +from perp_bot.infra.health import HealthChecker +from perp_bot.infra.logging import setup_logging +from perp_bot.ipc.protocol import get_socket_path +from perp_bot.ipc.server import DaemonStateServer +from perp_bot.ipc.state import DaemonState +from perp_bot.risk.manager import RiskManager +from perp_bot.signals.engine import Signal, SignalEngine +from perp_bot.signals.prediction import ( + PredictionRegime, + compute_regime, + funding_side_preference, + rate_change_score, + war_risk_score, +) + +logger = logging.getLogger(__name__) + + +def run_backfill(config_path: str | None = None) -> None: + """One-shot: backfill 
historical data into SQLite.""" + config = load_config(config_path) + db = Database(config.data.db_path) + client = HyperliquidClient() + ingestor = DataIngestor(config, db, client) + + try: + ingestor.run_full_backfill() + logger.info("Backfill complete") + finally: + db.close() + + +def run_trading_loop(config_path: str | None = None, force: bool = False) -> None: + """Main trading loop — runs until interrupted. + + Supports 'paper' and 'live' modes. In live mode, uses LiveExecutor + for real order placement and WebSocket for real-time price feeds. + Exposes runtime state via Unix socket for TUI attachment. + """ + config = load_config(config_path) + db = Database(config.data.db_path) + client = HyperliquidClient() + ingestor = DataIngestor(config, db, client) + signal_engine = SignalEngine(config.signals) + risk_manager = RiskManager(config, db) + entries_halted = False + + # Mode-specific setup + executor: Executor + ws_client: WsClient | None = None + + if config.mode == "live": + if not config.hl_private_key: + logger.error("HL_PRIVATE_KEY required for live mode") + sys.exit(1) + executor = LiveExecutor(config, db) + # Set leverage — refuse to start if it fails + for sym in config.trading.symbols: + if not executor.set_leverage(sym, config.trading.leverage): + logger.error("Failed to set leverage for %s — aborting live mode", sym) + sys.exit(1) + # Reconcile exchange positions against DB + _reconcile_positions(executor, db, config) + logger.info("Live mode — real orders will be placed") + else: + executor = PaperExecutor(db) + logger.info("Paper mode — no real orders") + + # Auto-stop after 3 consecutive losing weeks (§5.3) + if not _check_losing_weeks(db) and not force: + open_trade_count = len(db.get_open_trades()) + if config.mode == "live" and open_trade_count > 0: + entries_halted = True + logger.warning( + "Losing weeks halt active — managing %d existing live position(s)," + " new entries disabled", + open_trade_count, + ) + _alert( + config, + "BOT 
HALTED: 3 consecutive losing weeks — managing existing" + " live positions only", + ) + else: + logger.error("Use --force to override the losing weeks halt") + _alert(config, "BOT HALTED: 3 consecutive losing weeks") + sys.exit(1) + + # WebSocket for real-time prices (both modes benefit) + ws_client = WsClient() + ws_client.subscribe_mid_prices(config.trading.symbols) + + # Health checker for periodic heartbeat + health_checker = HealthChecker(config, db, ws_client, executor) + + # Prediction market clients + prediction_clients = _init_prediction_clients(config) + + # IPC: daemon state + socket server for TUI attachment + daemon_state = DaemonState(mode=config.mode) + socket_path = get_socket_path(config.data.db_path) + state_server = DaemonStateServer(socket_path, daemon_state, executor, db) + state_server.start() + + # Set up file logging for TUI log tailing + log_file = str(socket_path.parent / "perp-bot.log") + setup_logging(log_file=log_file) + + tf = config.data.primary_timeframe + interval_ms = INTERVAL_MS[tf] + min_candles = max( + config.signals.zscore_lookback, + config.signals.bollinger_period, + config.signals.rsi_period, + config.signals.adx_period * 2, # ADX needs more warmup + ) + + logger.info("Starting trading loop — mode=%s, timeframe=%s", config.mode, tf) + if config.discord_webhook_url: + send_discord_alert( + config.discord_webhook_url, + f"Bot started — mode={config.mode}", + ) + + last_prediction_poll_ms = 0 + prediction_regime_label = "NORMAL" + + try: + while True: + # WebSocket health check — reconnect if stale + if ws_client and not ws_client.is_healthy(): + logger.warning("WebSocket stale — triggering reconnect") + _alert(config, "WS RECONNECT: price feed was stale") + ws_client.reconnect() + + # Check pause state — skip tick but still monitor health + if daemon_state.paused: + daemon_state.update( + ws_healthy=ws_client.is_healthy() if ws_client else False, + ) + health_checker.tick(prediction_regime_label) + 
_sleep_until_next_candle(interval_ms) + continue + + last_prediction_poll_ms, prediction_regime_label = _tick( + config, db, client, ingestor, signal_engine, + risk_manager, executor, tf, min_candles, + prediction_clients, last_prediction_poll_ms, + ws_client, daemon_state, entries_halted, + ) + + # Update daemon state after tick + _update_daemon_state( + daemon_state, config, ws_client, risk_manager, + executor, prediction_regime_label, db, + ) + + health_checker.tick(prediction_regime_label) + _sleep_until_next_candle(interval_ms) + except KeyboardInterrupt: + logger.info("Shutting down") + finally: + state_server.stop() + if ws_client: + ws_client.close() + db.close() + + +def _reconcile_positions( + executor: LiveExecutor, db: Database, config, +) -> None: + """Reconcile exchange positions against DB on startup. + + - Exchange has position but DB doesn't → create DB record from exchange state + - DB has open position but exchange doesn't → close DB record as reconciled + """ + for symbol in config.trading.symbols: + exchange_pos = executor.get_exchange_position(symbol) + db_trades = db.get_open_trades(symbol) + + if exchange_pos and not db_trades: + # Exchange has a position we don't know about + now = int(time.time() * 1000) + trade_id = db.insert_trade({ + "symbol": symbol, + "side": exchange_pos["side"], + "entry_time": now, + "entry_price": exchange_pos["entry_price"], + "size_usd": exchange_pos["entry_price"] * exchange_pos["size_base"], + "is_paper": 0, + }) + logger.warning( + "RECONCILED: Found exchange position %s %s (%.4f @ %.2f) " + "not in DB — created trade #%d", + exchange_pos["side"], symbol, + exchange_pos["size_base"], exchange_pos["entry_price"], + trade_id, + ) + _alert( + config, + f"RECONCILE: Adopted orphan {exchange_pos['side']} " + f"{symbol} @ {exchange_pos['entry_price']:.2f}", + ) + + elif db_trades and not exchange_pos: + # DB thinks we have a position but exchange doesn't + for trade in db_trades: + now = int(time.time() * 1000) + 
db.close_trade( + trade["id"], now, trade["entry_price"], 0.0, + "reconciled_missing", + ) + logger.warning( + "RECONCILED: DB trade #%d (%s %s) has no exchange position — " + "closed as reconciled_missing", + trade["id"], trade["side"], symbol, + ) + _alert( + config, + f"RECONCILE: Closed {len(db_trades)} orphan DB trade(s) for {symbol}", + ) + + elif exchange_pos and db_trades: + logger.info( + "Position sync OK: %s %s matches DB trade #%d", + exchange_pos["side"], symbol, db_trades[0]["id"], + ) + + +def _check_losing_weeks(db: Database, num_weeks: int = 3) -> bool: + """Check if the last N weeks were all net-negative. Returns True if safe to start.""" + now_ms = int(time.time() * 1000) + week_ms = 7 * 24 * 3600 * 1000 + + losing_count = 0 + for i in range(num_weeks): + end = now_ms - i * week_ms + start = end - week_ms + trades = db.get_closed_trades_in_range(start, end) + if not trades: + return True # Not enough history — safe to start + weekly_pnl = sum(t.get("pnl", 0) or 0 for t in trades) + if weekly_pnl < 0: + losing_count += 1 + + if losing_count >= num_weeks: + logger.error( + "HALTED: %d consecutive losing weeks detected", num_weeks, + ) + return False + return True + + +def _init_prediction_clients(config) -> dict: + """Create prediction market client instances based on configured sources.""" + clients: dict = {} + if not config.prediction or not config.prediction.enabled: + return clients + sources = {m.source for m in config.prediction.markets} + if "polymarket" in sources: + clients["polymarket"] = PolymarketClient() + if "kalshi" in sources: + clients["kalshi"] = KalshiClient() + return clients + + +def _tick( + config, db, client, ingestor, signal_engine, + risk_manager, executor, tf, min_candles, + prediction_clients=None, last_prediction_poll_ms=0, + ws_client=None, daemon_state=None, entries_halted=False, +) -> tuple[int, str]: + """Single iteration of the trading loop. 
Returns (last_prediction_poll_ms, regime_label).""" + prediction_clients = prediction_clients or {} + + # --- Prediction market polling --- + prediction_regime = PredictionRegime.NORMAL + preferred_side = None + now_ms = int(time.time() * 1000) + + if config.prediction and config.prediction.enabled and prediction_clients: + poll_interval_ms = config.prediction.poll_interval_minutes * 60_000 + if now_ms - last_prediction_poll_ms >= poll_interval_ms: + last_prediction_poll_ms = now_ms + ingestor.update_predictions(prediction_clients, config.prediction) + + # Compute regime from latest cached data + prediction_regime, preferred_side = _compute_prediction_state(db, config) + + for symbol in config.trading.symbols: + # Update candles + ingestor.update_candles(symbol) + + # Load candles into DataFrame + candles = db.get_candles(symbol, tf, limit=min_candles + 50) + if len(candles) < min_candles: + logger.warning( + "Not enough candles for %s %s (%d < %d)", + symbol, tf, len(candles), min_candles, + ) + continue + + df = pd.DataFrame(candles) + df = signal_engine.compute_indicators(df) + + # Current position state + open_trades = db.get_open_trades(symbol) + position_side = open_trades[0]["side"] if open_trades else None + + # Check position-level exits first + if open_trades: + trade = open_trades[0] + current_price = _get_price(ws_client, client, symbol) + + # Capital-based stop loss + if risk_manager.check_stop_loss( + trade["entry_price"], current_price, trade["side"], trade["size_usd"], + ): + pnl = _compute_pnl(trade, current_price) + executor.close_position( + trade["id"], symbol, current_price, pnl, + "capital_stop_loss", + ) + risk_manager.record_stop_loss() + _alert(config, f"STOP LOSS {symbol} pnl={pnl:.2f}") + continue + + # Position timeout + if risk_manager.check_position_timeout(trade["entry_time"]): + pnl = _compute_pnl(trade, current_price) + executor.close_position( + trade["id"], symbol, current_price, pnl, + "timeout_24h", + ) + _alert(config, 
f"TIMEOUT {symbol} pnl={pnl:.2f}") + continue + + # Signal evaluation with prediction regime + result = signal_engine.evaluate( + df, position_side, prediction_regime, preferred_side, + ) + + # Expose signal to daemon state for TUI + if daemon_state is not None: + daemon_state.latest_signals[symbol] = { + "signal": result.signal.value, + "reason": result.reason, + "zscore": result.zscore_value, + "rsi": result.rsi_value, + "adx": result.adx_value, + "price": result.price, + } + + if result.signal == Signal.CLOSE and open_trades: + trade = open_trades[0] + current_price = _get_price(ws_client, client, symbol) + pnl = _compute_pnl(trade, current_price) + executor.close_position( + trade["id"], symbol, current_price, pnl, result.reason, + ) + _alert(config, f"CLOSE {symbol} {result.reason} pnl={pnl:.2f}") + + elif result.signal in (Signal.LONG, Signal.SHORT) and not open_trades: + if entries_halted: + logger.info( + "Entry blocked by losing weeks halt for %s", symbol, + ) + continue + risk_check = risk_manager.check_entry() + if risk_check.allowed: + size = risk_manager.compute_position_size(prediction_regime) + if size <= 0: + logger.info( + "Position size zero — regime=%s", + prediction_regime.value, + ) + continue + price = _get_price(ws_client, client, symbol) + trade_id = executor.open_position( + symbol, result.signal.value, size, price, + ) + if trade_id is None: + logger.error( + "Entry failed for %s %s — skipping OPEN alert", + result.signal.value, symbol, + ) + continue + regime_tag = "" + if prediction_regime != PredictionRegime.NORMAL: + regime_tag = f" [{prediction_regime.value}]" + _alert( + config, + f"OPEN {result.signal.value} {symbol}" + f" @ {price:.2f} size=${size:.0f}" + f"{regime_tag}", + ) + # Alert if server-side SL placement failed + if ( + isinstance(executor, LiveExecutor) + and executor._sl_failed + ): + _alert( + config, + f"CRITICAL: {symbol} UNHEDGED — " + f"server-side SL failed!", + ) + else: + logger.info("Entry blocked: %s", 
risk_check.reason) + + return last_prediction_poll_ms, prediction_regime.value + + +def _get_price(ws_client, rest_client, symbol: str) -> float: + """Get mid price from WebSocket cache, falling back to REST.""" + if ws_client is not None: + ws_price = ws_client.get_mid_price(symbol) + if ws_price is not None: + return ws_price + return rest_client.get_mid_price(symbol) + + +def _compute_prediction_state(db, config): + """Compute prediction regime and preferred side from latest DB snapshots.""" + snapshots = db.get_latest_predictions() + if not snapshots: + return PredictionRegime.NORMAL, None + + war_snapshots = [s for s in snapshots if s["category"] == "war_risk"] + rate_snapshots = [s for s in snapshots if s["category"] == "rate_change"] + + market_weights = { + m.slug: m.weight + for m in config.prediction.markets + if m.category == "war_risk" + } + w_risk = war_risk_score(war_snapshots, market_weights) + r_change = rate_change_score(rate_snapshots) + + regime = compute_regime(w_risk, r_change, config.prediction) + preferred = funding_side_preference(r_change, config.prediction.rate_change_threshold) + + if regime != PredictionRegime.NORMAL: + logger.info( + "Prediction regime=%s war_risk=%.3f rate_change=%.3f preferred=%s", + regime.value, w_risk, r_change, preferred, + ) + + return regime, preferred + + +def _compute_pnl(trade: dict, current_price: float) -> float: + """Compute unrealised P&L for a trade.""" + if trade["side"] == "long": + return (current_price - trade["entry_price"]) / trade["entry_price"] * trade["size_usd"] + else: + return (trade["entry_price"] - current_price) / trade["entry_price"] * trade["size_usd"] + + +def _alert(config, message: str) -> None: + logger.info(message) + if config.discord_webhook_url: + send_discord_alert(config.discord_webhook_url, message) + if config.telegram_bot_token and config.telegram_chat_id: + send_telegram_alert( + config.telegram_bot_token, config.telegram_chat_id, message, + ) + + +def 
_sleep_until_next_candle(interval_ms: int) -> None: + """Sleep until the next candle boundary plus a small buffer.""" + now_ms = int(time.time() * 1000) + next_candle = ((now_ms // interval_ms) + 1) * interval_ms + sleep_secs = (next_candle - now_ms) / 1000 + 5 # 5s buffer for candle to finalise + logger.debug("Sleeping %.1fs until next candle", sleep_secs) + time.sleep(sleep_secs) + + +def _update_daemon_state( + daemon_state: DaemonState, + config, ws_client, risk_manager, executor, + prediction_regime_label: str, db, +) -> None: + """Refresh the daemon state after a tick for IPC clients.""" + now_ms = int(time.time() * 1000) + + # Mid prices + mid_prices = {} + if ws_client: + for sym in config.trading.symbols: + p = ws_client.get_mid_price(sym) + if p is not None: + mid_prices[sym] = p + + # Risk check + risk_check = risk_manager.check_entry() + + # Cooldown remaining + cooldown_s = 0.0 + if risk_manager._last_stop_loss_time is not None: + elapsed = now_ms - risk_manager._last_stop_loss_time + remaining = risk_manager.risk.cooldown_seconds * 1000 - elapsed + if remaining > 0: + cooldown_s = remaining / 1000 + + # Daily PnL + day_start = (int(time.time()) - int(time.time()) % 86400) * 1000 + daily_pnl = db.get_daily_pnl(day_start) + + # Slippage + slippage_stats = {"count": 0, "avg_pct": 0.0, "max_pct": 0.0} + if hasattr(executor, "get_slippage_stats"): + slippage_stats = executor.get_slippage_stats() + + daemon_state.update( + tick_count=daemon_state.tick_count + 1, + last_tick_ms=now_ms, + mid_prices=mid_prices, + ws_healthy=ws_client.is_healthy() if ws_client else False, + prediction_regime=prediction_regime_label, + risk_allowed=risk_check.allowed, + risk_reason=risk_check.reason, + cooldown_remaining_s=cooldown_s, + daily_pnl=daily_pnl, + slippage_stats=slippage_stats, + ) + + +def run_status(config_path: str | None = None) -> None: + """One-shot: connect to daemon, print formatted state, exit.""" + from perp_bot.ipc.client import DaemonClient + + config = 
load_config(config_path) + socket_path = get_socket_path(config.data.db_path) + client = DaemonClient(socket_path) + + if not client.is_running(): + print("Daemon is not running (socket not found or not responding)") + sys.exit(1) + + state = client.get_state() + if state is None: + print("Failed to get state from daemon") + sys.exit(1) + + print(json.dumps(state, indent=2)) + + +def run_tui(config_path: str | None = None) -> None: + """Launch the TUI dashboard connecting to a running daemon.""" + from perp_bot.tui.app import PerpBotApp + + config = load_config(config_path) + app = PerpBotApp(config) + app.run() + + +def run_backfill_predictions(config_path: str | None = None) -> None: + """One-shot: fetch current prediction market snapshots into SQLite.""" + config = load_config(config_path) + if not config.prediction or not config.prediction.enabled: + logger.error("Prediction markets not configured or disabled") + sys.exit(1) + + db = Database(config.data.db_path) + clients = _init_prediction_clients(config) + ingestor = DataIngestor(config, db, HyperliquidClient()) + + try: + count = ingestor.update_predictions(clients, config.prediction) + logger.info("Inserted %d prediction snapshots", count) + finally: + db.close() + + +def run_backtest(config_path: str | None = None) -> None: + """Run a backtest over all historical data.""" + config = load_config(config_path) + bt_config = config.backtest or BacktestConfig() + db = Database(config.data.db_path) + + try: + engine = BacktestEngine(config, bt_config) + for symbol in config.trading.symbols: + logger.info("Running backtest for %s", symbol) + result = engine.run(db, symbol) + print(result.summary()) + if bt_config.export_trades_csv: + export_path = _trade_export_path( + bt_config.export_trades_csv, symbol, config.trading.symbols, + ) + result.trades_to_csv(export_path) + logger.info("Trades exported to %s", export_path) + finally: + db.close() + + +def run_walkforward(config_path: str | None = None) -> None: + 
"""Run walk-forward analysis.""" + config = load_config(config_path) + bt_config = config.backtest or BacktestConfig() + db = Database(config.data.db_path) + + try: + for symbol in config.trading.symbols: + # Find data range + candles = db.get_candles(symbol, config.data.primary_timeframe, limit=1) + if not candles: + logger.warning("No data for %s", symbol) + continue + first_time = candles[0]["open_time"] + latest = db.get_latest_candle_time(symbol, config.data.primary_timeframe) + if latest is None: + continue + + runner = WalkForwardRunner(config, bt_config) + result = runner.run(db, symbol, first_time, latest) + print(result.summary()) + finally: + db.close() + + +def run_sensitivity(config_path: str | None = None) -> None: + """Run parameter sensitivity analysis.""" + config = load_config(config_path) + bt_config = config.backtest or BacktestConfig() + db = Database(config.data.db_path) + + try: + analyzer = ParameterSensitivityAnalyzer(config, bt_config) + for symbol in config.trading.symbols: + logger.info("Running sensitivity analysis for %s", symbol) + report = analyzer.run(db, symbol) + print(report.summary()) + finally: + db.close() + + +def run_screen(config_path: str | None = None) -> None: + """Screen symbols for mean-reversion suitability (§4.5). + + Criteria: Hurst < 0.5, 24h volume > $50M, tight spread. 
+ """ + from perp_bot.signals.indicators import hurst_exponent + + config = load_config(config_path) + db = Database(config.data.db_path) + client = HyperliquidClient() + + try: + meta = client.get_asset_meta() + if not meta or len(meta) < 2: + print("Failed to fetch asset metadata") + return + + universe = meta[0].get("universe", []) + asset_ctxs = meta[1] if len(meta) > 1 else [] + + # Build volume + symbol lookup + vol_map: dict[str, float] = {} + for i, asset in enumerate(universe): + name = asset["name"] + if i < len(asset_ctxs): + ctx = asset_ctxs[i] + vol_map[name] = float(ctx.get("dayNtlVlm", 0)) + + symbols = [a["name"] for a in universe] + + header = ( + f"{'Symbol':<10} {'Vol24h($M)':>12} {'Spread%':>9}" + f" {'Hurst':>8} {'Verdict'}" + ) + print(header) + print("-" * len(header)) + + for symbol in symbols: + vol_24h = vol_map.get(symbol, 0) + + # §4.5: 24h volume > $50M + if vol_24h < 50_000_000: + continue + + # Bid-ask spread + try: + l2 = client.info.l2_snapshot(symbol) + bid = float(l2["levels"][0][0]["px"]) + ask = float(l2["levels"][1][0]["px"]) + mid = (bid + ask) / 2 + spread_pct = (ask - bid) / mid * 100 if mid > 0 else 999 + except Exception: + spread_pct = 999.0 + + # Hurst exponent from historical candles + candles = db.get_candles( + symbol, config.data.primary_timeframe, limit=500, descending=True, + ) + if len(candles) < 50: + continue + closes = pd.Series([c["close"] for c in candles]) + h = hurst_exponent(closes) + + # Verdict: must pass all §4.5 criteria + passes_hurst = h < 0.5 + passes_spread = spread_pct < 0.05 + if passes_hurst and passes_spread: + verdict = "CANDIDATE" + elif passes_hurst: + verdict = "MEAN-REV (wide spread)" + elif h > 0.55: + verdict = "TRENDING" + else: + verdict = "RANDOM" + + vol_m = vol_24h / 1_000_000 + print( + f"{symbol:<10} {vol_m:>12.1f} {spread_pct:>8.4f}%" + f" {h:>8.3f} {verdict}" + ) + finally: + db.close() + + +def run_compare( + config_path: str | None = None, days: int = 7, +) -> None: + 
"""Compare paper trading results against backtest for the same period.""" + from perp_bot.reporting.compare import compare_paper_vs_backtest + + config = load_config(config_path) + db = Database(config.data.db_path) + now_ms = int(time.time() * 1000) + start_ms = now_ms - days * 24 * 3600 * 1000 + + try: + for symbol in config.trading.symbols: + print(compare_paper_vs_backtest( + config, db, symbol, start_ms, now_ms, + )) + print() + finally: + db.close() + + +def _trade_export_path( + raw_path: str, symbol: str, all_symbols: list[str], +) -> str: + """Return a per-symbol export path when backtesting multiple symbols.""" + if len(all_symbols) == 1: + return raw_path + + path = Path(raw_path) + if path.suffix: + return str(path.with_name(f"{path.stem}-{symbol}{path.suffix}")) + return str(path.with_name(f"{path.name}-{symbol}")) + + +def run_review(config_path: str | None = None, weeks: int = 1) -> None: + """Print a weekly performance review.""" + from perp_bot.reporting.weekly import generate_weekly_report + + config = load_config(config_path) + db = Database(config.data.db_path) + try: + print(generate_weekly_report(db, weeks)) + finally: + db.close() + + +def main() -> None: + setup_logging() + parser = argparse.ArgumentParser(description="Hyperliquid mean-reversion bot") + parser.add_argument( + "command", + choices=[ + "backfill", "trade", "backfill-predictions", + "backtest", "walkforward", "sensitivity", + "screen", "review", "compare", "tui", "status", + ], + help="Command to run", + ) + parser.add_argument("--config", default=None, help="Path to config.yaml") + parser.add_argument("--weeks", type=int, default=1, help="Weeks for review report") + parser.add_argument("--days", type=int, default=7, help="Days for compare range") + parser.add_argument("--force", action="store_true", help="Override safety halts") + args = parser.parse_args() + + if args.command == "backfill": + run_backfill(args.config) + elif args.command == "trade": + 
run_trading_loop(args.config, force=args.force) + elif args.command == "backfill-predictions": + run_backfill_predictions(args.config) + elif args.command == "backtest": + run_backtest(args.config) + elif args.command == "walkforward": + run_walkforward(args.config) + elif args.command == "sensitivity": + run_sensitivity(args.config) + elif args.command == "screen": + run_screen(args.config) + elif args.command == "review": + run_review(args.config, args.weeks) + elif args.command == "compare": + run_compare(args.config, args.days) + elif args.command == "tui": + run_tui(args.config) + elif args.command == "status": + run_status(args.config) + + +if __name__ == "__main__": + main() diff --git a/src/perp_bot/config.py b/src/perp_bot/config.py index 70d9daa..4126dcd 100644 --- a/src/perp_bot/config.py +++ b/src/perp_bot/config.py @@ -9,8 +9,6 @@ import yaml from dotenv import load_dotenv -_PROJECT_ROOT = Path(__file__).resolve().parent.parent.parent - @dataclass(frozen=True) class TradingConfig: @@ -117,11 +115,14 @@ class BotConfig: def load_config(config_path: str | Path | None = None) -> BotConfig: """Load config from YAML file + environment variables.""" - load_dotenv(_PROJECT_ROOT / ".env") - if config_path is None: - config_path = _PROJECT_ROOT / "config.yaml" - config_path = Path(config_path) + config_path = Path.cwd() / "config.yaml" + env_path = Path.cwd() / ".env" + else: + config_path = Path(config_path).expanduser().resolve() + env_path = config_path.parent / ".env" + + load_dotenv(env_path) with open(config_path) as f: raw = yaml.safe_load(f) diff --git a/tests/test_config.py b/tests/test_config.py new file mode 100644 index 0000000..da461af --- /dev/null +++ b/tests/test_config.py @@ -0,0 +1,70 @@ +"""Tests for config loading behavior when installed as a CLI.""" + +from __future__ import annotations + +from pathlib import Path + +from perp_bot.config import load_config + +CONFIG_YAML = """\ +trading: + symbols: ["ETH"] + leverage: 3 + capital_usd: 670.0 
+ margin_usage_limit: 0.5 +signals: + zscore_lookback: 20 + zscore_entry_threshold: 2.0 + zscore_exit_threshold: 0.3 + zscore_stop_threshold: 3.0 + bollinger_period: 20 + bollinger_std: 2.0 + rsi_period: 14 + rsi_overbought: 70 + rsi_oversold: 30 + adx_period: 14 + adx_threshold: 25 +risk: + max_loss_per_trade_pct: 0.03 + daily_loss_limit_pct: 0.08 + max_positions: 1 + cooldown_seconds: 1800 + position_timeout_hours: 24 +data: + timeframes: ["15m"] + primary_timeframe: "15m" + history_days: 90 + db_path: ":memory:" +execution: + order_type: "limit" + taker_fallback_seconds: 30 + use_server_side_stop: true +mode: "paper" +""" + + +def test_load_config_defaults_to_current_working_directory(monkeypatch, tmp_path: Path): + monkeypatch.delenv("DISCORD_WEBHOOK_URL", raising=False) + (tmp_path / "config.yaml").write_text(CONFIG_YAML) + (tmp_path / ".env").write_text("DISCORD_WEBHOOK_URL=https://example.test/hook\n") + + monkeypatch.chdir(tmp_path) + + config = load_config() + + assert config.data.db_path == ":memory:" + assert config.discord_webhook_url == "https://example.test/hook" + + +def test_load_config_uses_env_next_to_explicit_config(monkeypatch, tmp_path: Path): + monkeypatch.delenv("TELEGRAM_CHAT_ID", raising=False) + config_dir = tmp_path / "instance" + config_dir.mkdir() + (config_dir / "config.yaml").write_text(CONFIG_YAML) + (config_dir / ".env").write_text("TELEGRAM_CHAT_ID=12345\n") + + monkeypatch.chdir(tmp_path) + + config = load_config(config_dir / "config.yaml") + + assert config.telegram_chat_id == "12345" diff --git a/tests/test_main.py b/tests/test_main.py index 335e5b7..289f737 100644 --- a/tests/test_main.py +++ b/tests/test_main.py @@ -2,10 +2,11 @@ from __future__ import annotations +import sys from pathlib import Path from types import SimpleNamespace -import main +import perp_bot.cli as main from perp_bot.config import ( BacktestConfig, BotConfig, @@ -250,8 +251,23 @@ def fake_tick(*args, **_kwargs): monkeypatch.setattr(main, "_tick", 
fake_tick) monkeypatch.setattr(main, "_alert", lambda *_args, **_kwargs: None) monkeypatch.setattr(main, "get_socket_path", lambda _db_path: Path("/tmp/perp-bot.sock")) - monkeypatch.setattr(main.sys, "argv", ["main.py", "trade"]) main.run_trading_loop() assert call_order == ["reconcile", "check", "tick"] + + +def test_main_passes_force_to_trade_command(monkeypatch): + called: list[tuple[str | None, bool]] = [] + + monkeypatch.setattr(main, "setup_logging", lambda *args, **kwargs: None) + monkeypatch.setattr( + main, + "run_trading_loop", + lambda config_path=None, force=False: called.append((config_path, force)), + ) + monkeypatch.setattr(sys, "argv", ["perpbot", "trade", "--config", "config.yaml", "--force"]) + + main.main() + + assert called == [("config.yaml", True)] From fbc5b4b320ea402a8fc996a54749fd1294215b4f Mon Sep 17 00:00:00 2001 From: morfize <233522679+morfize@users.noreply.github.com> Date: Thu, 19 Mar 2026 04:11:23 -0700 Subject: [PATCH 7/8] docs: expand README and add developer guides --- README.md | 258 +++++++++++++++++++++++++++++++++++------- docs/README.md | 24 ++++ docs/architecture.md | 238 ++++++++++++++++++++++++++++++++++++++ docs/configuration.md | 223 ++++++++++++++++++++++++++++++++++++ docs/development.md | 202 +++++++++++++++++++++++++++++++++ docs/operations.md | 234 ++++++++++++++++++++++++++++++++++++++ 6 files changed, 1141 insertions(+), 38 deletions(-) create mode 100644 docs/README.md create mode 100644 docs/architecture.md create mode 100644 docs/configuration.md create mode 100644 docs/development.md create mode 100644 docs/operations.md diff --git a/README.md b/README.md index 120271a..f10b838 100644 --- a/README.md +++ b/README.md @@ -1,77 +1,259 @@ -# perpbot +# perp-bot -`perpbot` is a Python CLI for running a Hyperliquid mean-reversion perpetual futures bot, backfilling market data, backtesting, screening symbols, and monitoring a running daemon. 
+`perp-bot` is a Python CLI for running a Hyperliquid perpetual futures mean-reversion bot, collecting market data, backtesting the strategy, and monitoring a live daemon from a terminal UI. -## Release status +This repository is structured as an installable package with a `perpbot` console command, SQLite-backed local state, and separate modules for data ingestion, signals, execution, risk, reporting, IPC, and the Textual TUI. -- Python package with console entry point: `perpbot` -- GitHub Actions CI for lint, tests, and build validation -- GitHub tag-based release workflow that uploads wheel and source distribution artifacts +## Status -## Install +- Package name: `perp-bot` +- CLI entry point: `perpbot` +- Python requirement: `>=3.12` +- Current release: `0.1.0` +- Maturity: alpha -Local development install: +## What It Does + +- Backfills OHLCV candles and funding history from Hyperliquid into SQLite +- Polls prediction markets and stores regime signals alongside market data +- Runs a paper or live trading loop with risk controls and health checks +- Exposes daemon state over a Unix socket for `status` and `tui` +- Backtests the strategy with fees, slippage, funding, and execution delay +- Produces comparison and weekly review reports from stored trades + +## Quick Start + +### 1. Install dependencies + +```bash +uv sync --group dev +``` + +### 2. Create local secrets + +```bash +cp .env.example .env +``` + +For paper trading, you can leave the Hyperliquid key empty. + +### 3. Review the sample config + +The repository ships with a starter [`config.yaml`](config.yaml). By default, `perpbot` reads: + +- `config.yaml` from the current working directory +- `.env` from the current working directory + +If you pass `--config /path/to/config.yaml`, the CLI also loads `.env` from that config file's directory. + +### 4. Backfill data + +```bash +uv run perpbot backfill +``` + +### 5. Run a backtest + +```bash +uv run perpbot backtest +``` + +### 6. 
Start paper trading + +```bash +uv run perpbot trade +``` + +In another terminal, you can inspect the daemon: ```bash -uv sync +uv run perpbot status +uv run perpbot tui +``` + +## Installation + +### Local development install + +```bash +uv sync --group dev uv run perpbot --help ``` -Install directly from GitHub: +### Install directly from GitHub ```bash uv tool install git+https://github.com/morfize/perp-bot.git perpbot --help ``` -Or with `pip`: +### Install a tagged release ```bash -pip install "git+https://github.com/morfize/perp-bot.git" -perpbot --help +uv tool install git+https://github.com/morfize/perp-bot.git@v0.1.0 ``` -Install a tagged release: +### Compatibility shim + +Direct execution still works: ```bash -uv tool install git+https://github.com/morfize/perp-bot.git@v0.1.0 +python main.py --help ``` ## Configuration -By default, `perpbot` looks for these files in your current working directory: +### YAML config + +The main config file is [`config.yaml`](config.yaml). Top-level sections are: + +- `trading` +- `signals` +- `risk` +- `data` +- `execution` +- `prediction` +- `backtest` +- `mode` + +Important defaults in the sample config: + +- symbol universe starts with `["ETH"]` +- primary timeframe is `15m` +- database path is `perp_bot.db` +- mode defaults to `paper` + +### Environment variables + +Secrets and alert integrations are read from `.env`: + +- `HL_PRIVATE_KEY` +- `HL_WALLET_ADDRESS` +- `DISCORD_WEBHOOK_URL` +- `TELEGRAM_BOT_TOKEN` +- `TELEGRAM_CHAT_ID` + +Use [`.env.example`](.env.example) as the starting template. + +### Live mode + +To enable live execution: + +1. Set `mode: "live"` in `config.yaml` +2. Provide `HL_PRIVATE_KEY` in `.env` +3. Optionally provide `HL_WALLET_ADDRESS` if you do not want to rely on key-derived address detection + +Live mode sets leverage on startup, reconciles exchange positions against the local database, and attempts to attach a server-side stop-loss to each new position. 
+ +## CLI Commands + +| Command | Purpose | +| --- | --- | +| `perpbot backfill` | Backfill historical candles and funding into SQLite | +| `perpbot trade` | Start the main paper/live trading loop | +| `perpbot trade --force` | Override the consecutive losing-weeks startup halt | +| `perpbot backfill-predictions` | Fetch current prediction market snapshots | +| `perpbot backtest` | Run a full backtest over stored market data | +| `perpbot walkforward` | Run walk-forward analysis | +| `perpbot sensitivity` | Run parameter sensitivity analysis | +| `perpbot screen` | Screen markets for mean-reversion suitability | +| `perpbot review --weeks 1` | Generate a weekly performance review | +| `perpbot compare --days 7` | Compare paper trading against backtest results | +| `perpbot status` | Print daemon state as JSON | +| `perpbot tui` | Launch the Textual monitoring dashboard | + +Run the global help for all flags: + +```bash +uv run perpbot --help +``` + +## Architecture Summary + +The main runtime pieces are: + +- `perp_bot.data`: Hyperliquid REST/WebSocket access and SQLite persistence +- `perp_bot.signals`: indicator calculations and signal evaluation +- `perp_bot.risk`: pre-entry checks, stop-loss logic, cooldowns, and sizing +- `perp_bot.execution`: paper executor and Hyperliquid live executor +- `perp_bot.backtest`: historical simulation, cost model, metrics, walk-forward, sensitivity +- `perp_bot.ipc`: Unix socket server/client and daemon state model +- `perp_bot.tui`: Textual dashboard for monitoring and emergency actions +- `perp_bot.reporting`: weekly reports and paper-vs-backtest comparison +- `perp_bot.infra`: logging, alerts, and health checks + +At a high level: + +1. `backfill` stores candles and funding in SQLite. +2. `trade` updates candles, computes indicators, checks risk, then opens/closes positions. +3. The trading daemon publishes volatile runtime state over a Unix socket. +4. 
`status` and `tui` attach to that socket while reading persistent trade data from SQLite. +5. `backtest` reuses the same signal and risk logic against historical data. + +## Repository Layout + +```text +. +├── src/perp_bot/ +│ ├── backtest/ +│ ├── data/ +│ ├── execution/ +│ ├── infra/ +│ ├── ipc/ +│ ├── reporting/ +│ ├── risk/ +│ ├── signals/ +│ └── tui/ +├── tests/ +├── deploy/ +├── docs/ +├── config.yaml +├── .env.example +└── pyproject.toml +``` + +## Developer Docs + +- [Documentation index](docs/README.md) +- [Architecture](docs/architecture.md) +- [Configuration reference](docs/configuration.md) +- [Development workflow](docs/development.md) +- [Operations and deployment](docs/operations.md) -- `config.yaml` -- `.env` +## Development -You can also point the CLI at a specific config file: +### Common commands ```bash -perpbot trade --config /path/to/config.yaml +uv sync --group dev +uv run ruff check src tests +uv run pytest +uv build ``` -When `--config` is provided, `perpbot` also loads `.env` from that config file's directory. +### Test matrix + +GitHub Actions runs: + +- lint on `src` and `tests` +- test suite on Python `3.12` and `3.13` +- package build validation + +## Deployment The repository includes: -- `config.yaml` as a starting config -- `.env.example` as an environment variable template -- `CHANGELOG.md` for release notes +- [`deploy/deploy.sh`](deploy/deploy.sh) for pushing the repo to a GCP Compute Engine instance +- [`deploy/perp-bot.service`](deploy/perp-bot.service) for systemd -## CLI usage +The service expects the app at `/opt/perp-bot` and starts: ```bash -perpbot --help -perpbot trade -perpbot trade --force -perpbot backfill -perpbot backfill-predictions -perpbot backtest -perpbot walkforward -perpbot sensitivity -perpbot screen -perpbot review --weeks 1 -perpbot compare --days 7 -perpbot tui -perpbot status +/opt/perp-bot/.venv/bin/perpbot trade ``` + +## Safety Notes + +- This is trading software. 
Review the code, config, and exchange behavior before enabling live mode. +- The project includes capital-based stop-losses, daily loss limits, cooldowns, and a losing-weeks startup halt, but those controls do not eliminate market, exchange, or software risk. +- The software is not financial advice. diff --git a/docs/README.md b/docs/README.md new file mode 100644 index 0000000..d2eab83 --- /dev/null +++ b/docs/README.md @@ -0,0 +1,24 @@ +# Developer Documentation + +This directory contains the working docs for contributors and operators of `perp-bot`. + +## Documents + +- [`architecture.md`](architecture.md): module boundaries, runtime flow, persistence model, and extension points +- [`configuration.md`](configuration.md): YAML and `.env` reference, runtime artifacts, and mode-specific behavior +- [`development.md`](development.md): local setup, workflow, testing, packaging, and contribution guidance +- [`operations.md`](operations.md): paper/live runbooks, deployment, logging, service management, and troubleshooting + +## Suggested Reading Order + +If you are new to the codebase: + +1. Read the top-level `README.md` +2. Read [`architecture.md`](architecture.md) +3. Read [`configuration.md`](configuration.md) +4. Read [`development.md`](development.md) + +If you are deploying or operating the bot: + +1. Read [`configuration.md`](configuration.md) +2. Read [`operations.md`](operations.md) diff --git a/docs/architecture.md b/docs/architecture.md new file mode 100644 index 0000000..95c1564 --- /dev/null +++ b/docs/architecture.md @@ -0,0 +1,238 @@ +# Architecture + +This document describes how the main parts of `perp-bot` fit together and which source files own each responsibility. 
+ +## System Overview + +`perp-bot` has two main execution styles: + +- one-shot commands such as `backfill`, `backtest`, `review`, and `compare` +- the long-running `trade` daemon, which can be observed through `status` and `tui` + +The trading daemon uses: + +- SQLite for persistent historical data, trades, and small pieces of durable bot state +- a Unix socket for volatile runtime state and control commands +- JSON logs to stdout and an optional rotating file for the TUI log pane + +## Module Map + +### `perp_bot.cli` + +Owns CLI argument parsing and command dispatch. It is also the composition root for the trading loop, backtests, and reporting commands. + +Key responsibilities: + +- load config +- instantiate the database, clients, executors, and risk/signal engines +- run the main trading tick loop +- expose `status` and `tui` + +### `perp_bot.config` + +Defines the typed dataclass config model and loads: + +- `config.yaml` +- `.env` in the working directory, or next to the explicit `--config` path + +The config loader is a central dependency for almost every command. + +### `perp_bot.data` + +Owns exchange and prediction market data access. + +Important files: + +- `client.py`: Hyperliquid REST wrapper +- `ws_client.py`: WebSocket wrapper with mid-price cache and reconnect logic +- `db.py`: SQLite schema and persistence API +- `ingest.py`: orchestration for full backfill and incremental updates +- `prediction_client.py`: prediction market adapters + +### `perp_bot.signals` + +Owns indicator calculation and signal evaluation. + +Important files: + +- `indicators.py`: Z-score, Bollinger Bands, RSI, ADX, Hurst exponent +- `engine.py`: converts indicator state into `LONG`, `SHORT`, `CLOSE`, or `NONE` +- `prediction.py`: prediction-market-derived regime classification + +The backtest and live trading loop intentionally reuse this logic. + +### `perp_bot.risk` + +Owns runtime risk checks and position sizing. 
+ +Important responsibilities: + +- max open positions +- daily realized loss limit +- cooldown after stop loss +- position timeout +- regime-adjusted position sizing + +The backtest has a separate adapter in `backtest/risk_adapter.py` so the simulation can mirror live behavior. + +### `perp_bot.execution` + +Owns order execution. + +- `executor.py`: abstract interface plus `PaperExecutor` +- `live_executor.py`: Hyperliquid order placement, leverage setup, stop-loss attachment, slippage tracking, and exchange reconciliation helpers + +This separation keeps the trading loop mostly mode-agnostic. + +### `perp_bot.backtest` + +Owns historical simulation and analysis. + +Important files: + +- `engine.py`: row-by-row backtest driver +- `executor.py`: simulated trade executor +- `cost_model.py`: fees, slippage, funding +- `metrics.py`: performance metrics +- `walk_forward.py`: train/test window analysis +- `sensitivity.py`: parameter sweeps +- `results.py`: backtest result model and summaries + +### `perp_bot.ipc` + +Owns daemon state sharing and control. + +- `state.py`: thread-safe daemon state container +- `server.py`: Unix socket state server +- `client.py`: socket client used by `status` and TUI actions +- `protocol.py`: command names and socket path helper + +### `perp_bot.tui` + +Owns the Textual dashboard. + +The TUI attaches to a running daemon, reads volatile state over IPC, reads trades from SQLite in read-only mode, and tails the rotating log file. + +### `perp_bot.reporting` + +Owns derived reporting on top of stored trades and backtests. + +- `weekly.py`: weekly performance review +- `compare.py`: paper-vs-backtest comparison + +### `perp_bot.infra` + +Shared operational utilities: + +- `logging.py`: JSON logging +- `health.py`: periodic heartbeat alerts +- `alerts.py`: Discord and Telegram integration + +## Runtime Flows + +### 1. Backfill Flow + +Command: `perpbot backfill` + +1. Load config and open the database. +2. 
Create `HyperliquidClient` and `DataIngestor`. +3. For each configured symbol, backfill candles for every configured timeframe. +4. Backfill funding history. +5. Persist rows into SQLite with `INSERT OR IGNORE`. + +### 2. Trading Flow + +Command: `perpbot trade` + +1. Load config and open the database. +2. Create the data ingestor, signal engine, risk manager, and executor. +3. In live mode, validate `HL_PRIVATE_KEY`, set exchange leverage per symbol, and reconcile exchange state with local DB state. +4. Start WebSocket mid-price subscription. +5. Start the IPC server for `status` and `tui`. +6. Enter the candle-aligned main loop. + +Each trading tick performs roughly this sequence: + +1. Reconnect WebSocket if the price feed is stale. +2. Poll prediction markets on schedule and compute regime state. +3. Incrementally update candles for each symbol. +4. Load the latest candle window from SQLite. +5. Compute indicators. +6. Check position-level exits first, including capital stop loss and position timeout. +7. Evaluate signal-engine exits or entries. +8. Run pre-entry risk checks before opening anything new. +9. Update daemon state for the IPC/TUI layer. +10. Emit heartbeat alerts and sleep until the next candle boundary. + +### 3. Backtest Flow + +Command: `perpbot backtest` + +1. Load historical candles and funding history from SQLite. +2. Compute indicators once across the full frame. +3. Simulate the strategy row by row after the warmup window. +4. Apply fee, slippage, funding, entry-delay, and cancel-if-signal-gone rules. +5. Produce a `BacktestResult` with trades, equity curve, and metrics. + +The backtest deliberately shares signal logic with live trading to reduce divergence. + +### 4. Monitoring Flow + +Commands: `perpbot status`, `perpbot tui` + +- `status` is a simple socket client that prints the daemon snapshot as JSON. +- `tui` attaches to the same socket, reads open and recent trades directly from SQLite, and tails the daemon log file. 
+ +This split keeps the socket payload small while letting SQLite remain the source of truth for historical trade data. + +## Persistence Model + +The SQLite database currently contains these tables: + +- `candles` +- `funding_rates` +- `prediction_snapshots` +- `trades` +- `bot_state` + +Notable usage: + +- `candles` and `funding_rates` power both live evaluation and backtests +- `prediction_snapshots` decouples regime polling from signal evaluation +- `trades` is the primary audit trail for paper and live execution +- `bot_state` stores small durable flags such as stop-loss cooldown timestamps + +## Runtime Artifacts + +Given `data.db_path`, the process derives: + +- SQLite DB: configured directly, for example `perp_bot.db` +- Unix socket: `/perp-bot.sock` +- rotating log file for TUI: `/perp-bot.log` + +Keeping these together makes it possible for `status` and `tui` to locate the daemon from the same config. + +## Safety Controls + +Important runtime controls implemented in code: + +- max positions +- daily realized loss cap +- cooldown after stop loss +- 24-hour position timeout +- position-size reduction in high-risk prediction regimes +- new-entry block in crisis regimes +- startup halt after three consecutive losing weeks unless `--force` is used +- live position reconciliation before the losing-weeks halt is enforced + +## Extension Points + +Common places to extend the system: + +- add a new report in `perp_bot.reporting` +- add a new CLI command in `perp_bot.cli` +- add new indicators in `perp_bot.signals.indicators` +- add a new prediction source behind `prediction_client.py` +- add new TUI widgets under `perp_bot.tui.widgets` + +When extending behavior, keep live and backtest logic aligned wherever possible. The current design intentionally shares signal semantics across both modes. 
diff --git a/docs/configuration.md b/docs/configuration.md new file mode 100644 index 0000000..c4f758a --- /dev/null +++ b/docs/configuration.md @@ -0,0 +1,223 @@ +# Configuration Reference + +This document describes the runtime configuration surfaces used by `perp-bot`. + +## Config Sources + +`perp-bot` reads configuration from two places: + +1. `config.yaml` +2. `.env` + +Resolution behavior: + +- default mode: both files are resolved from the current working directory +- explicit mode: when `--config /path/to/config.yaml` is provided, `.env` is loaded from that config file's directory + +This behavior is covered by tests in `tests/test_config.py`. + +## YAML Structure + +The sample [`config.yaml`](../config.yaml) contains these top-level sections. + +## `trading` + +Controls the market universe and top-level position sizing inputs. + +Fields: + +- `symbols`: list of symbols to manage +- `leverage`: exchange leverage target in live mode +- `capital_usd`: modeled capital base +- `margin_usage_limit`: fraction of capital allowed to be committed as margin + +Notes: + +- The live trading loop iterates over `symbols` sequentially. +- `compute_position_size()` uses `capital_usd * margin_usage_limit * leverage`. + +## `signals` + +Controls the core mean-reversion thresholds. + +Fields: + +- `zscore_lookback` +- `zscore_entry_threshold` +- `zscore_exit_threshold` +- `zscore_stop_threshold` +- `bollinger_period` +- `bollinger_std` +- `rsi_period` +- `rsi_overbought` +- `rsi_oversold` +- `adx_period` +- `adx_threshold` + +These values feed `SignalEngine`. + +## `risk` + +Controls hard trading constraints. + +Fields: + +- `max_loss_per_trade_pct` +- `daily_loss_limit_pct` +- `max_positions` +- `cooldown_seconds` +- `position_timeout_hours` + +These values feed `RiskManager`. + +## `data` + +Controls local storage and the candle universe. 
+ +Fields: + +- `timeframes`: all intervals to store during backfill and incremental updates +- `primary_timeframe`: interval used by the strategy and backtests +- `history_days`: lookback range for initial backfill +- `db_path`: SQLite database path + +Notes: + +- The database path also determines where the IPC socket and daemon log file live. +- If you want multiple independent instances, give each one its own `db_path`. + +## `execution` + +Controls entry order behavior. + +Fields: + +- `order_type` +- `taker_fallback_seconds` +- `use_server_side_stop` + +Current implementation details: + +- the live executor uses a limit-first strategy +- after `taker_fallback_seconds`, it may fall back to an IOC taker order +- server-side stop-loss attachment is attempted for live positions + +## `prediction` + +Controls optional prediction market regime input. + +Fields: + +- `enabled` +- `poll_interval_minutes` +- `war_risk_threshold` +- `war_risk_crisis_threshold` +- `rate_change_threshold` +- `position_size_reduction` +- `markets` + +Each market entry includes: + +- `slug` +- `source` +- `market_id` +- `category` +- `weight` + +Notes: + +- `enabled: true` is not enough on its own; the configured market identifiers must also be valid. +- When no prediction data exists yet, the bot falls back to `NORMAL` regime behavior. + +## `backtest` + +Controls simulation behavior. + +Fields: + +- `maker_fee_rate` +- `taker_fee_rate` +- `slippage_min_pct` +- `slippage_max_pct` +- `entry_delay_candles` +- `cancel_if_signal_gone` +- `train_days` +- `test_days` +- `step_days` +- `seed` +- `export_trades_csv` if you add it explicitly + +Notes: + +- If the `backtest` section is missing, code falls back to `BacktestConfig()` defaults. +- Multi-symbol export paths are automatically disambiguated by symbol name. 
+ +## `mode` + +Allowed values: + +- `paper` +- `live` + +Behavior: + +- `paper`: uses `PaperExecutor`, writes simulated trades to SQLite +- `live`: uses `LiveExecutor`, requires exchange credentials, sets leverage, and reconciles positions on startup + +## Environment Variables + +Use [`.env.example`](../.env.example) as the starting point. + +### Required for live trading + +- `HL_PRIVATE_KEY` + +### Optional + +- `HL_WALLET_ADDRESS` +- `DISCORD_WEBHOOK_URL` +- `TELEGRAM_BOT_TOKEN` +- `TELEGRAM_CHAT_ID` + +Notes: + +- `HL_WALLET_ADDRESS` is optional because the live executor can derive the address from the private key. +- Alert variables are optional and can be combined. + +## Runtime Artifacts Derived From `db_path` + +If `db_path` is `./perp_bot.db`, the daemon also uses: + +- socket path: `./perp-bot.sock` +- rotating log file: `./perp-bot.log` + +This is why `status` and `tui` can locate a daemon with only the config file. + +## Recommended Local Config Patterns + +### Paper development + +- keep `mode: "paper"` +- use a local `db_path` such as `./perp_bot.db` +- start with a single symbol +- backfill before running `trade`, `backtest`, or `screen` + +### Separate environments + +Use separate directories or config files for: + +- local development +- paper trading +- live trading + +That keeps databases, socket files, logs, and secrets isolated. + +### Live safety + +Before switching to live mode: + +1. verify the symbol list +2. verify leverage and risk limits +3. confirm alert delivery +4. verify `HL_PRIVATE_KEY` and wallet address +5. test `status` and `tui` against a paper daemon first diff --git a/docs/development.md b/docs/development.md new file mode 100644 index 0000000..1da5ca5 --- /dev/null +++ b/docs/development.md @@ -0,0 +1,202 @@ +# Development Workflow + +This document is for contributors working on the codebase locally. 
+ +## Prerequisites + +- Python `3.12+` +- `uv` + +Optional but useful: + +- SQLite tooling such as `sqlite3` +- a terminal that handles Textual applications well + +## Local Setup + +### Clone and install + +```bash +git clone https://github.com/morfize/perp-bot.git +cd perp-bot +uv sync --group dev +cp .env.example .env +``` + +### Verify the CLI + +```bash +uv run perpbot --help +``` + +### Bootstrap a local database + +```bash +uv run perpbot backfill +``` + +Without a local database, many commands will not have useful data to work with. + +## Common Commands + +### Run tests + +```bash +uv run pytest +``` + +### Run a focused test + +```bash +uv run pytest tests/test_config.py -v +``` + +### Lint + +```bash +uv run ruff check src tests +``` + +### Build the package + +```bash +uv build +``` + +### Run the bot locally + +```bash +uv run perpbot trade +uv run perpbot status +uv run perpbot tui +``` + +## Project Conventions + +### Packaging + +- the package source lives under `src/perp_bot` +- the console entry point is `perpbot = "perp_bot.cli:main"` +- `main.py` is only a compatibility shim for direct script-style execution + +### Testing + +- the test suite lives under `tests/` +- pytest is configured in `pyproject.toml` +- GitHub Actions runs tests on Python `3.12` and `3.13` + +### Linting + +- Ruff is configured in `pyproject.toml` +- current target version is Python `3.12` +- line length is `100` + +### Implementation Style Already Established In The Repo + +- dataclasses for typed config models +- module-local helper functions where orchestration would otherwise become noisy +- pandas/numpy for indicators and simulation logic +- SQLite as the default local system of record +- JSON structured logs rather than free-form text logs + +## Typical Workflows + +### Change Strategy or Signal Behavior + +Main files to inspect: + +- `src/perp_bot/signals/indicators.py` +- `src/perp_bot/signals/engine.py` +- `src/perp_bot/risk/manager.py` +- 
`src/perp_bot/backtest/engine.py` + +Recommended loop: + +1. update signal or risk logic +2. add or update tests +3. run `uv run pytest` +4. run `uv run perpbot backtest` +5. inspect downstream report changes + +### Change Execution Behavior + +Main files to inspect: + +- `src/perp_bot/execution/executor.py` +- `src/perp_bot/execution/live_executor.py` +- `src/perp_bot/cli.py` + +Be careful to preserve: + +- trade persistence semantics +- slippage tracking +- server-side stop-loss behavior +- startup reconciliation + +### Change Data Ingestion or Schema + +Main files to inspect: + +- `src/perp_bot/data/client.py` +- `src/perp_bot/data/ws_client.py` +- `src/perp_bot/data/ingest.py` +- `src/perp_bot/data/db.py` + +If you change schema or persistence behavior, also inspect: + +- `tui` +- reporting code +- backtest loaders +- tests that rely on the DB API + +### Add a New CLI Command + +The current CLI is centralized in `src/perp_bot/cli.py`. + +Typical steps: + +1. implement a `run_()` function +2. add the command name to `argparse` choices +3. wire the function into `main()` +4. add tests that cover dispatch or behavior +5. document the command in `README.md` + +## Working With the TUI + +The TUI is attach-only. It does not run the trading loop itself. + +Important implications: + +- start `perpbot trade` first +- then run `perpbot tui` +- trade history comes from SQLite +- volatile runtime state comes from the IPC socket +- the log panel tails the rotating daemon log file + +## Packaging and Releases + +Local validation: + +```bash +uv build +``` + +GitHub Actions: + +- `.github/workflows/ci.yml` runs lint, tests, and build validation +- `.github/workflows/release.yml` builds artifacts and creates a GitHub release when a `v*` tag is pushed + +Before cutting a release: + +1. update code and tests +2. update `CHANGELOG.md` +3. verify `README.md` if commands or config changed +4. 
run lint, tests, and build locally + +## Practical Tips + +- keep paper and live configs separate +- keep one database per runtime instance +- prefer reusing the existing typed config model instead of passing raw dicts through new code +- keep live and backtest semantics aligned when changing strategy behavior +- if you add new operator-facing behavior, document it in `README.md` and `docs/operations.md` diff --git a/docs/operations.md b/docs/operations.md new file mode 100644 index 0000000..1e11ccb --- /dev/null +++ b/docs/operations.md @@ -0,0 +1,234 @@ +# Operations and Deployment + +This document covers day-to-day operation of `perp-bot`, from local paper trading to systemd deployment. + +## Runtime Modes + +### Paper Mode + +Default mode. Uses `PaperExecutor` and records simulated trades in SQLite. + +Recommended for: + +- local development +- validating config changes +- checking TUI behavior +- smoke testing a new release + +### Live Mode + +Uses `LiveExecutor` and places real orders on Hyperliquid. + +Additional startup behavior: + +- requires `HL_PRIVATE_KEY` +- sets leverage for configured symbols +- reconciles exchange positions with local DB state +- attempts to attach server-side stop-loss protection + +## Paper Trading Runbook + +### Start a fresh local session + +```bash +cp .env.example .env +uv sync --group dev +uv run perpbot backfill +uv run perpbot trade +``` + +In another terminal: + +```bash +uv run perpbot status +uv run perpbot tui +``` + +### What to expect + +- SQLite DB at the configured `db_path` +- Unix socket next to that DB +- rotating log file next to that DB +- open and closed trades recorded in SQLite + +## Live Trading Checklist + +Before enabling live mode: + +1. confirm `mode: "live"` intentionally +2. verify symbol list and leverage +3. verify `capital_usd`, margin cap, and stop-loss settings +4. verify `HL_PRIVATE_KEY` +5. verify alert endpoints if you depend on Discord or Telegram +6. 
run a paper session with the same config shape first +7. verify `perpbot status` and `perpbot tui` against the daemon + +After startup, confirm: + +- leverage setup succeeded +- position reconciliation logs look correct +- WebSocket feed is healthy +- daemon status returns valid JSON + +## Safety Controls Worth Monitoring + +Built-in controls include: + +- per-trade capital stop loss +- daily loss limit +- cooldown after stop loss +- 24-hour position timeout +- max position count +- crisis-regime entry block +- three-consecutive-losing-weeks startup halt + +If `trade` refuses to start because of the losing-weeks halt, you can override it with: + +```bash +uv run perpbot trade --force +``` + +Use that only intentionally. + +## Daemon Interfaces + +### JSON status + +```bash +uv run perpbot status +``` + +This reads the daemon state over the Unix socket and prints JSON. + +### TUI + +```bash +uv run perpbot tui +``` + +Key bindings include: + +- `p`: pause the daemon +- `r`: resume the daemon +- `e`: emergency close flow +- `q`: quit the TUI + +The TUI is safe to attach and detach repeatedly. + +## Logging + +The app uses JSON structured logs. + +Outputs: + +- stdout +- rotating file log when the trading daemon starts the IPC/TUI stack + +Log file location: + +- `<db_dir>/perp-bot.log` (in the same directory as the configured `db_path`) + +Fields include: + +- timestamp +- level +- logger name +- message +- exception, when present + +## Deployment + +The repository includes: + +- `deploy/deploy.sh` +- `deploy/perp-bot.service` + +The deploy script assumes: + +- a GCP Compute Engine instance +- `gcloud` authenticated locally +- SSH access to the instance +- `uv` installed on the remote machine + +### Deployment flow + +```bash +./deploy/deploy.sh <instance-name> [zone] +``` + +The script: + +1. copies the repo to `/opt/perp-bot` +2. runs `uv sync --frozen` remotely +3. installs the bundled systemd service +4. restarts the service +5.
prints service status + +### systemd expectations + +The service file currently expects: + +- app directory: `/opt/perp-bot` +- environment file: `/opt/perp-bot/.env` +- service user: `perp-bot` +- executable: `/opt/perp-bot/.venv/bin/perpbot trade` + +If your target environment differs, edit `deploy/perp-bot.service` before deployment. + +## Useful Service Commands + +On the host: + +```bash +sudo systemctl status perp-bot --no-pager +sudo journalctl -u perp-bot -n 200 --no-pager +sudo journalctl -u perp-bot -f +sudo systemctl restart perp-bot +sudo systemctl stop perp-bot +``` + +## Troubleshooting + +### `perpbot status` Says The Daemon Is Not Running + +Check: + +- whether `perpbot trade` is actually running +- whether the socket path matches the configured `db_path` +- whether the daemon has permissions to create files in the DB directory + +### `tui` Opens But Shows No Useful State + +Check: + +- the daemon is already running +- the TUI is pointed at the same config file +- the daemon has produced at least one tick +- the log file exists next to the socket + +### Trade Loop Warns About Missing Data + +Check: + +- `perpbot backfill` has been run +- the configured `primary_timeframe` exists in the DB +- `history_days` and timeframe are sufficient to satisfy indicator warmup + +### Live Mode Exits At Startup + +Check: + +- `HL_PRIVATE_KEY` +- exchange connectivity +- leverage configuration for the selected symbols +- whether reconciliation surfaced inconsistent local state + +### WebSocket Is Repeatedly Marked Stale + +Check: + +- network stability on the host +- whether the Hyperliquid WebSocket feed is reachable +- daemon logs around reconnect attempts + +The bot will try to reconnect automatically, but repeated staleness should be treated as an operational issue. 
From 99d00f6d2aefc6134667f6c8394ed52ae5b06562 Mon Sep 17 00:00:00 2001 From: morfize <233522679+morfize@users.noreply.github.com> Date: Fri, 20 Mar 2026 05:50:57 -0700 Subject: [PATCH 8/8] Add binary release distribution --- .github/workflows/ci.yml | 3 + .github/workflows/release.yml | 37 +++++++- CHANGELOG.md | 12 +++ README.md | 62 ++++++++----- docs/development.md | 22 +++-- docs/operations.md | 15 +-- main.py | 15 +++ perpbot | 15 +++ scripts/build-release-archive.sh | 28 ++++++ scripts/install.sh | 152 +++++++++++++++++++++++++++++++ tests/test_entrypoints.py | 38 ++++++++ tests/test_install_script.py | 110 ++++++++++++++++++++++ 12 files changed, 469 insertions(+), 40 deletions(-) mode change 100644 => 100755 main.py create mode 100755 perpbot create mode 100755 scripts/build-release-archive.sh create mode 100755 scripts/install.sh create mode 100644 tests/test_entrypoints.py create mode 100644 tests/test_install_script.py diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 4f5acf9..4072ee5 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -48,6 +48,9 @@ jobs: - name: Build distributions run: uv build + - name: Build standalone binary archive + run: ./scripts/build-release-archive.sh perpbot-linux-x86_64.tar.gz + - name: Upload build artifacts uses: actions/upload-artifact@v4 with: diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 0b41523..9500825 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -10,7 +10,7 @@ permissions: contents: write jobs: - release: + package-release: runs-on: ubuntu-latest steps: @@ -23,8 +23,41 @@ jobs: - name: Build distributions run: uv build - - name: Publish GitHub release + - name: Publish Python artifacts uses: softprops/action-gh-release@v2 with: files: dist/* generate_release_notes: true + + binary-release: + runs-on: ${{ matrix.os }} + strategy: + fail-fast: false + matrix: + include: + - os: ubuntu-latest + asset_name: 
perpbot-linux-x86_64.tar.gz + - os: macos-13 + asset_name: perpbot-macos-x86_64.tar.gz + - os: macos-14 + asset_name: perpbot-macos-arm64.tar.gz + + steps: + - name: Check out repository + uses: actions/checkout@v4 + + - name: Set up uv + uses: astral-sh/setup-uv@v5 + + - name: Set up Python + uses: actions/setup-python@v5 + with: + python-version: "3.12" + + - name: Build standalone binary archive + run: ./scripts/build-release-archive.sh ${{ matrix.asset_name }} + + - name: Publish binary artifact + uses: softprops/action-gh-release@v2 + with: + files: dist/* diff --git a/CHANGELOG.md b/CHANGELOG.md index c20170d..3464b62 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,6 +4,18 @@ All notable changes to this project will be documented in this file. The format is based on Keep a Changelog, and this project follows Semantic Versioning. +## [Unreleased] + +### Added + +- One-line installer script that downloads a prebuilt standalone binary from GitHub Releases. +- Release packaging script for OS-specific standalone `perpbot` binaries. + +### Changed + +- README now treats bare `perpbot` usage as the primary CLI flow instead of `uv run perpbot`. +- Release automation now targets downloadable standalone binaries in addition to Python package artifacts. + ## [0.1.0] - 2026-03-19 ### Added diff --git a/README.md b/README.md index f10b838..8450f70 100644 --- a/README.md +++ b/README.md @@ -23,13 +23,21 @@ This repository is structured as an installable package with a `perpbot` console ## Quick Start -### 1. Install dependencies +### 1. Install the CLI ```bash -uv sync --group dev +curl -fsSL https://raw.githubusercontent.com/morfize/perp-bot/main/scripts/install.sh | sh ``` -### 2. Create local secrets +This installs a prebuilt standalone binary to `~/.local/bin/perpbot`. + +### 2. Verify the command + +```bash +perpbot --help +``` + +### 3. 
Create local secrets ```bash cp .env.example .env @@ -37,7 +45,7 @@ cp .env.example .env For paper trading, you can leave the Hyperliquid key empty. -### 3. Review the sample config +### 4. Review the sample config The repository ships with a starter [`config.yaml`](config.yaml). By default, `perpbot` reads: @@ -46,59 +54,63 @@ The repository ships with a starter [`config.yaml`](config.yaml). By default, `p If you pass `--config /path/to/config.yaml`, the CLI also loads `.env` from that config file's directory. -### 4. Backfill data +### 5. Backfill data ```bash -uv run perpbot backfill +perpbot backfill ``` -### 5. Run a backtest +### 6. Run a backtest ```bash -uv run perpbot backtest +perpbot backtest ``` -### 6. Start paper trading +### 7. Start paper trading ```bash -uv run perpbot trade +perpbot trade ``` In another terminal, you can inspect the daemon: ```bash -uv run perpbot status -uv run perpbot tui +perpbot status +perpbot tui ``` ## Installation -### Local development install +### One-line install ```bash -uv sync --group dev -uv run perpbot --help +curl -fsSL https://raw.githubusercontent.com/morfize/perp-bot/main/scripts/install.sh | sh ``` -### Install directly from GitHub +This downloads the latest GitHub release binary for your OS and installs it to `~/.local/bin/perpbot`. 
+ +### Install a tagged release ```bash -uv tool install git+https://github.com/morfize/perp-bot.git -perpbot --help +curl -fsSL https://raw.githubusercontent.com/morfize/perp-bot/main/scripts/install.sh | env PERPBOT_VERSION=v0.1.0 sh ``` -### Install a tagged release +### Local development install ```bash -uv tool install git+https://github.com/morfize/perp-bot.git@v0.1.0 +uv sync --group dev +source .venv/bin/activate +perpbot --help ``` ### Compatibility shim -Direct execution still works: +Repo-local execution still works, but this is not the primary install path: ```bash -python main.py --help +./perpbot --help +./main.py --help +python3 main.py --help ``` ## Configuration @@ -165,7 +177,7 @@ Live mode sets leverage on startup, reconciles exchange positions against the lo Run the global help for all flags: ```bash -uv run perpbot --help +perpbot --help ``` ## Architecture Summary @@ -229,6 +241,7 @@ uv sync --group dev uv run ruff check src tests uv run pytest uv build +./scripts/build-release-archive.sh perpbot-macos-arm64.tar.gz ``` ### Test matrix @@ -237,7 +250,8 @@ GitHub Actions runs: - lint on `src` and `tests` - test suite on Python `3.12` and `3.13` -- package build validation +- Python package build validation +- standalone binary build validation ## Deployment diff --git a/docs/development.md b/docs/development.md index 1da5ca5..509db97 100644 --- a/docs/development.md +++ b/docs/development.md @@ -26,13 +26,14 @@ cp .env.example .env ### Verify the CLI ```bash -uv run perpbot --help +source .venv/bin/activate +perpbot --help ``` ### Bootstrap a local database ```bash -uv run perpbot backfill +perpbot backfill ``` Without a local database, many commands will not have useful data to work with. 
@@ -63,12 +64,18 @@ uv run ruff check src tests uv build ``` +### Build a standalone binary archive + +```bash +./scripts/build-release-archive.sh perpbot-macos-arm64.tar.gz +``` + ### Run the bot locally ```bash -uv run perpbot trade -uv run perpbot status -uv run perpbot tui +perpbot trade +perpbot status +perpbot tui ``` ## Project Conventions @@ -115,7 +122,7 @@ Recommended loop: 1. update signal or risk logic 2. add or update tests 3. run `uv run pytest` -4. run `uv run perpbot backtest` +4. run `perpbot backtest` 5. inspect downstream report changes ### Change Execution Behavior @@ -179,12 +186,13 @@ Local validation: ```bash uv build +./scripts/build-release-archive.sh perpbot-macos-arm64.tar.gz ``` GitHub Actions: - `.github/workflows/ci.yml` runs lint, tests, and build validation -- `.github/workflows/release.yml` builds artifacts and creates a GitHub release when a `v*` tag is pushed +- `.github/workflows/release.yml` builds Python artifacts plus standalone release binaries when a `v*` tag is pushed Before cutting a release: diff --git a/docs/operations.md b/docs/operations.md index 1e11ccb..fd8db44 100644 --- a/docs/operations.md +++ b/docs/operations.md @@ -33,15 +33,16 @@ Additional startup behavior: ```bash cp .env.example .env uv sync --group dev -uv run perpbot backfill -uv run perpbot trade +source .venv/bin/activate +perpbot backfill +perpbot trade ``` In another terminal: ```bash -uv run perpbot status -uv run perpbot tui +perpbot status +perpbot tui ``` ### What to expect @@ -85,7 +86,7 @@ Built-in controls include: If `trade` refuses to start because of the losing-weeks halt, you can override it with: ```bash -uv run perpbot trade --force +perpbot trade --force ``` Use that only intentionally. @@ -95,7 +96,7 @@ Use that only intentionally. ### JSON status ```bash -uv run perpbot status +perpbot status ``` This reads the daemon state over the Unix socket and prints JSON. 
@@ -103,7 +104,7 @@ This reads the daemon state over the Unix socket and prints JSON. ### TUI ```bash -uv run perpbot tui +perpbot tui ``` Key bindings include: diff --git a/main.py b/main.py old mode 100644 new mode 100755 index 1268345..26201b1 --- a/main.py +++ b/main.py @@ -1,5 +1,20 @@ +#!/usr/bin/env python3 """Compatibility shim for direct ``python main.py ...`` usage.""" +import sys +import os +from pathlib import Path + +ROOT = Path(__file__).resolve().parent +SRC = ROOT / "src" +VENV_PYTHON = ROOT / ".venv" / "bin" / "python" + +if VENV_PYTHON.exists() and Path(sys.executable).resolve() != VENV_PYTHON.resolve(): + os.execv(str(VENV_PYTHON), [str(VENV_PYTHON), str(Path(__file__).resolve()), *sys.argv[1:]]) + +if str(SRC) not in sys.path: + sys.path.insert(0, str(SRC)) + from perp_bot.cli import main diff --git a/perpbot b/perpbot new file mode 100755 index 0000000..2929298 --- /dev/null +++ b/perpbot @@ -0,0 +1,15 @@ +#!/bin/sh +set -eu + +ROOT="$(CDPATH= cd -- "$(dirname -- "$0")" && pwd)" + +if [ -x "$ROOT/.venv/bin/perpbot" ]; then + exec "$ROOT/.venv/bin/perpbot" "$@" +fi + +if command -v python3 >/dev/null 2>&1; then + exec python3 "$ROOT/main.py" "$@" +fi + +printf '%s\n' "python3 not found" >&2 +exit 1 diff --git a/scripts/build-release-archive.sh b/scripts/build-release-archive.sh new file mode 100755 index 0000000..f068c85 --- /dev/null +++ b/scripts/build-release-archive.sh @@ -0,0 +1,28 @@ +#!/bin/sh +set -eu + +if [ "$#" -ne 1 ]; then + printf 'Usage: %s \n' "$0" >&2 + exit 1 +fi + +ASSET_NAME="$1" +ROOT="$(CDPATH= cd -- "$(dirname -- "$0")/.." 
&& pwd)" + +cd "$ROOT" +rm -rf build/pyinstaller build/spec +mkdir -p build/binary build/pyinstaller build/spec dist + +uvx --from pyinstaller pyinstaller \ + --noconfirm \ + --clean \ + --onefile \ + --name perpbot \ + --paths src \ + --distpath build/binary \ + --workpath build/pyinstaller \ + --specpath build/spec \ + main.py + +tar -C build/binary -czf "dist/$ASSET_NAME" perpbot +shasum -a 256 "dist/$ASSET_NAME" > "dist/$ASSET_NAME.sha256" diff --git a/scripts/install.sh b/scripts/install.sh new file mode 100755 index 0000000..24c65dc --- /dev/null +++ b/scripts/install.sh @@ -0,0 +1,152 @@ +#!/bin/sh +set -eu + +REPO_SLUG="${PERPBOT_REPO_SLUG:-morfize/perp-bot}" +VERSION="${PERPBOT_VERSION:-latest}" +INSTALL_DIR="${PERPBOT_INSTALL_DIR:-$HOME/.local/bin}" +TMP_DIR="${TMPDIR:-/tmp}" + +print_help() { + cat <<'EOF' +Usage: install.sh [--print-asset | --print-url | --print-install-dir | -h | --help] + +Environment overrides: + PERPBOT_VERSION release tag to install (default: latest) + PERPBOT_INSTALL_DIR install directory (default: $HOME/.local/bin) + PERPBOT_OS override operating system detection + PERPBOT_ARCH override architecture detection +EOF +} + +detect_os() { + os="${PERPBOT_OS:-$(uname -s)}" + case "$os" in + Darwin) + printf '%s\n' "macos" + ;; + Linux) + printf '%s\n' "linux" + ;; + *) + printf 'Unsupported operating system: %s\n' "$os" >&2 + exit 1 + ;; + esac +} + +detect_arch() { + arch="${PERPBOT_ARCH:-$(uname -m)}" + case "$arch" in + x86_64|amd64) + printf '%s\n' "x86_64" + ;; + arm64|aarch64) + printf '%s\n' "arm64" + ;; + *) + printf 'Unsupported architecture: %s\n' "$arch" >&2 + exit 1 + ;; + esac +} + +asset_name() { + os="$(detect_os)" + arch="$(detect_arch)" + printf 'perpbot-%s-%s.tar.gz\n' "$os" "$arch" +} + +download_url() { + asset="$(asset_name)" + if [ "$VERSION" = "latest" ]; then + printf 'https://github.com/%s/releases/latest/download/%s\n' "$REPO_SLUG" "$asset" + return 0 + fi + + printf 'https://github.com/%s/releases/download/%s/%s\n' "$REPO_SLUG" "$VERSION" "$asset" +} + +download_file() { + url="$1" + output="$2" + + if command -v curl >/dev/null 2>&1; then + curl -fsSL "$url" -o "$output" + return 0 + fi + + if command -v wget >/dev/null 2>&1; then + wget -qO "$output" "$url" + return 0 + fi + + printf '%s\n' "Neither curl nor wget is available to download $url" >&2 + exit 1 +} + +case "${1:-}" in + --print-asset) + asset_name + exit 0 + ;; + --print-url) + download_url + exit 0 + ;; + --print-install-dir) + printf '%s\n' "$INSTALL_DIR" + exit 0 + ;; + -h|--help)
print_help + exit 0 + ;; + "") + ;; + *) + printf 'Unknown argument: %s\n' "$1" >&2 + print_help >&2 + exit 1 + ;; +esac + +asset="$(asset_name)" +url="$(download_url)" +archive="$TMP_DIR/$asset.$$" +extract_dir="$TMP_DIR/perpbot-install.$$" + +cleanup() { + rm -f "$archive" + rm -rf "$extract_dir" +} + +trap cleanup EXIT HUP INT TERM + +mkdir -p "$INSTALL_DIR" +mkdir -p "$extract_dir" + +printf 'Downloading %s\n' "$url" >&2 +download_file "$url" "$archive" +tar -xzf "$archive" -C "$extract_dir" +install -m 755 "$extract_dir/perpbot" "$INSTALL_DIR/perpbot" + +cat < None: + result = subprocess.run( + ["./main.py", "--help"], + cwd=ROOT, + capture_output=True, + text=True, + check=False, + ) + + assert result.returncode == 0 + assert "Hyperliquid mean-reversion bot" in result.stdout + + +def test_repo_launcher_help_runs() -> None: + result = subprocess.run( + ["./perpbot", "--help"], + cwd=ROOT, + capture_output=True, + text=True, + check=False, + env={**os.environ, "PATH": os.environ["PATH"]}, + ) + + assert result.returncode == 0 + assert "Hyperliquid mean-reversion bot" in result.stdout diff --git a/tests/test_install_script.py b/tests/test_install_script.py new file mode 100644 index 0000000..c4f1fe3 --- /dev/null +++ b/tests/test_install_script.py @@ -0,0 +1,110 @@ +"""Tests for the binary installer script.""" + +from __future__ import annotations + +import os +import subprocess +from pathlib import Path + + +ROOT = Path(__file__).resolve().parents[1] +SCRIPT = ROOT / "scripts" / "install.sh" + + +def test_install_script_prints_default_asset_name() -> None: + env = {**os.environ, "PERPBOT_OS": "Darwin", "PERPBOT_ARCH": "arm64"} + result = subprocess.run( + ["sh", str(SCRIPT), "--print-asset"], + cwd=ROOT, + capture_output=True, + text=True, + check=False, + env=env, + ) + + assert result.returncode == 0 + assert result.stdout.strip() == "perpbot-macos-arm64.tar.gz" + + +def test_install_script_prints_latest_download_url() -> None: + env = {**os.environ, 
"PERPBOT_OS": "Linux", "PERPBOT_ARCH": "x86_64"} + result = subprocess.run( + ["sh", str(SCRIPT), "--print-url"], + cwd=ROOT, + capture_output=True, + text=True, + check=False, + env=env, + ) + + assert result.returncode == 0 + assert result.stdout.strip() == ( + "https://github.com/morfize/perp-bot/releases/latest/download/perpbot-linux-x86_64.tar.gz" + ) + + +def test_install_script_prints_tagged_download_url() -> None: + env = { + **os.environ, + "PERPBOT_OS": "Darwin", + "PERPBOT_ARCH": "x86_64", + "PERPBOT_VERSION": "v0.1.0", + } + result = subprocess.run( + ["sh", str(SCRIPT), "--print-url"], + cwd=ROOT, + capture_output=True, + text=True, + check=False, + env=env, + ) + + assert result.returncode == 0 + assert result.stdout.strip() == ( + "https://github.com/morfize/perp-bot/releases/download/v0.1.0/perpbot-macos-x86_64.tar.gz" + ) + + +def test_install_script_prints_install_dir_override() -> None: + env = {**os.environ, "PERPBOT_INSTALL_DIR": "/tmp/perpbot-bin"} + result = subprocess.run( + ["sh", str(SCRIPT), "--print-install-dir"], + cwd=ROOT, + capture_output=True, + text=True, + check=False, + env=env, + ) + + assert result.returncode == 0 + assert result.stdout.strip() == "/tmp/perpbot-bin" + + +def test_install_script_rejects_unsupported_os() -> None: + env = {**os.environ, "PERPBOT_OS": "FreeBSD", "PERPBOT_ARCH": "x86_64"} + result = subprocess.run( + ["sh", str(SCRIPT), "--print-asset"], + cwd=ROOT, + capture_output=True, + text=True, + check=False, + env=env, + ) + + assert result.returncode == 1 + assert "Unsupported operating system: FreeBSD" in result.stderr + + +def test_install_script_rejects_unsupported_arch() -> None: + env = {**os.environ, "PERPBOT_OS": "Linux", "PERPBOT_ARCH": "riscv64"} + result = subprocess.run( + ["sh", str(SCRIPT), "--print-asset"], + cwd=ROOT, + capture_output=True, + text=True, + check=False, + env=env, + ) + + assert result.returncode == 1 + assert "Unsupported architecture: riscv64" in result.stderr