arbitrage-engine/backend/liquidation_collector.py
fanziqi ad60a53262 review: add code audit annotations and REVIEW.md for v5.1
P0 issues annotated (critical, must fix before live trading):
- signal_engine.py: cooldown blocks reverse-signal position close
- paper_monitor.py + signal_engine.py: pnl_r 2x inflated for TP scenarios
- signal_engine.py: entry price uses 30min VWAP instead of real-time price
- paper_monitor.py + signal_engine.py: concurrent write race on paper_trades

P1 issues annotated (long-term stability):
- db.py: ensure_partitions uses timedelta(30d) causing missed monthly partitions
- signal_engine.py: float precision drift in buy_vol/sell_vol accumulation
- market_data_collector.py: single bare connection with no reconnect logic
- db.py: get_sync_pool initialization not thread-safe
- signal_engine.py: recent_large_trades deque has no maxlen

P2/P3 issues annotated across backend and frontend:
- coinbase_premium KeyError for XRP/SOL symbols
- liquidation_collector: redundant elif condition in aggregation logic
- auth.py: JWT secret hardcoded default, login rate-limit absent
- Frontend: concurrent refresh token race, AuthContext not synced on failure
- Frontend: universal catch{} swallows all API errors silently
- Frontend: serial API requests in LatestSignals, market-indicators over-polling

docs/REVIEW.md: comprehensive audit report with all 34 issues (P0×4, P1×5,
P2×6, P3×4 backend + FE-P1×4, FE-P2×8, FE-P3×3 frontend), fix suggestions
and prioritized remediation roadmap.
2026-03-01 17:14:52 +08:00

143 lines
5.7 KiB
Python
Raw Blame History

This file contains ambiguous Unicode characters

This file contains Unicode characters that might be confused with other characters. If you think that this is intentional, you can safely ignore this warning. Use the Escape button to reveal them.

"""
清算数据采集器 — 币安WS forceOrder实时流
存入 market_indicators 表indicator_type = 'liquidation'
每笔清算记录symbol, side, price, qty, trade_time
每5分钟汇总一次long_liq_usd, short_liq_usd, total_liq_usd, count
"""
import asyncio
import json
import logging
import time
import websockets
from db import get_sync_conn
logging.basicConfig(level=logging.INFO, format="%(asctime)s [%(levelname)s] %(message)s")
logger = logging.getLogger("liquidation_collector")
SYMBOLS = ["BTCUSDT", "ETHUSDT", "XRPUSDT", "SOLUSDT"]
WS_URL = "wss://fstream.binance.com/stream?streams=" + "/".join(
f"{s.lower()}@forceOrder" for s in SYMBOLS
)
# 5分钟聚合窗口
AGG_INTERVAL = 300 # seconds
def ensure_table():
    """Create the liquidations table and its (symbol, trade_time) index if absent.

    Idempotent: both statements use IF NOT EXISTS, so it is safe to call on
    every startup.
    """
    ddl = """
                CREATE TABLE IF NOT EXISTS liquidations (
                    id BIGSERIAL PRIMARY KEY,
                    symbol TEXT NOT NULL,
                    side TEXT NOT NULL,
                    price DOUBLE PRECISION NOT NULL,
                    qty DOUBLE PRECISION NOT NULL,
                    usd_value DOUBLE PRECISION NOT NULL,
                    trade_time BIGINT NOT NULL,
                    created_at TIMESTAMP DEFAULT NOW()
                );
                CREATE INDEX IF NOT EXISTS idx_liquidations_symbol_time
                ON liquidations(symbol, trade_time DESC);
            """
    with get_sync_conn() as conn:
        with conn.cursor() as cur:
            cur.execute(ddl)
        conn.commit()
    logger.info("liquidations table ensured")
def save_liquidation(symbol: str, side: str, price: float, qty: float, usd_value: float, trade_time: int):
    """Insert one raw liquidation row into the liquidations table.

    Args:
        symbol: futures symbol, e.g. "BTCUSDT".
        side: normalized liquidation side, "LONG" or "SHORT".
        price: fill price of the force order.
        qty: base-asset quantity liquidated.
        usd_value: price * qty, precomputed by the caller.
        trade_time: exchange trade timestamp in milliseconds.
    """
    sql = (
        "INSERT INTO liquidations (symbol, side, price, qty, usd_value, trade_time) "
        "VALUES (%s, %s, %s, %s, %s, %s)"
    )
    row = (symbol, side, price, qty, usd_value, trade_time)
    with get_sync_conn() as conn:
        with conn.cursor() as cur:
            cur.execute(sql, row)
        conn.commit()
def save_aggregated(symbol: str, ts_ms: int, long_liq_usd: float, short_liq_usd: float, count: int):
    """Store a 5-minute liquidation aggregate in market_indicators for signal_engine to read.

    The value column receives a JSON payload with long/short/total USD sums
    (rounded to cents) and the event count for the window.
    """
    value = {
        "long_liq_usd": round(long_liq_usd, 2),
        "short_liq_usd": round(short_liq_usd, 2),
        "total_liq_usd": round(long_liq_usd + short_liq_usd, 2),
        "count": count,
    }
    with get_sync_conn() as conn:
        with conn.cursor() as cur:
            cur.execute(
                "INSERT INTO market_indicators (symbol, indicator_type, timestamp_ms, value) "
                "VALUES (%s, %s, %s, %s)",
                (symbol, "liquidation", ts_ms, json.dumps(value)),
            )
        conn.commit()
async def run():
    """Consume the Binance forceOrder stream and persist liquidations.

    For every message: store the raw liquidation row, accumulate it into the
    symbol's current 5-minute buffer, then flush any symbol whose window has
    elapsed into market_indicators. Reconnects after 5s on any WS/parse/DB
    error.

    NOTE(review): window flushes only run when a message arrives, so during a
    liquidation-free lull the aggregate (including the zero record) can be
    written late — acceptable for this stream, but worth knowing.
    """
    ensure_table()
    # Per-symbol aggregation buffers for the current 5-minute window.
    agg = {
        s: {"long_usd": 0.0, "short_usd": 0.0, "count": 0, "window_start": int(time.time())}
        for s in SYMBOLS
    }
    while True:
        try:
            logger.info("Connecting to Binance forceOrder WS...")
            async with websockets.connect(WS_URL, ping_interval=20, ping_timeout=10) as ws:
                logger.info("Connected! Listening for liquidations...")
                async for msg in ws:
                    data = json.loads(msg)
                    if "data" not in data:
                        # Combined-stream control frames carry no "data" key.
                        continue
                    order = data["data"]["o"]
                    symbol = order["s"]
                    side = order["S"]  # BUY = a short got liquidated, SELL = a long
                    price = float(order["p"])
                    qty = float(order["q"])
                    usd_value = price * qty
                    trade_time = order["T"]
                    # Normalize direction: a BUY force-order closes a SHORT position.
                    liq_side = "SHORT" if side == "BUY" else "LONG"
                    # Persist the raw record.
                    save_liquidation(symbol, liq_side, price, qty, usd_value, trade_time)
                    logger.info(f"[{symbol}] 💥 {liq_side} liquidation: {qty} @ ${price:.2f} = ${usd_value:,.0f}")
                    # Accumulate into the symbol's window buffer.
                    if symbol in agg:
                        buf = agg[symbol]
                        if liq_side == "LONG":
                            buf["long_usd"] += usd_value
                        else:
                            buf["short_usd"] += usd_value
                        buf["count"] += 1
                    # Flush every symbol whose 5-minute window has elapsed.
                    now = int(time.time())
                    for sym in SYMBOLS:
                        buf = agg[sym]
                        if now - buf["window_start"] >= AGG_INTERVAL:
                            if buf["count"] > 0:
                                save_aggregated(sym, now * 1000, buf["long_usd"], buf["short_usd"], buf["count"])
                                logger.info(f"[{sym}] 📊 5min agg: long=${buf['long_usd']:,.0f} short=${buf['short_usd']:,.0f} count={buf['count']}")
                            else:
                                # FIX (review P2): was `elif now - buf["window_start"] >= AGG_INTERVAL`,
                                # which is always true inside this branch — plain `else` is equivalent.
                                # Write a zero record so the indicator series stays continuous.
                                save_aggregated(sym, now * 1000, 0, 0, 0)
                            # Reset the buffer for the next window.
                            buf["long_usd"] = 0.0
                            buf["short_usd"] = 0.0
                            buf["count"] = 0
                            buf["window_start"] = now
        except Exception as e:
            logger.error(f"WS error: {e}, reconnecting in 5s...")
            await asyncio.sleep(5)
if __name__ == "__main__":
asyncio.run(run())