fix: deduplicate recent_large_trades to prevent memory bloat
Each evaluate cycle re-appended every qualifying trade from win_fast, so recent_large_trades accumulated duplicate entries and grew without bound. Added a seen-set (keyed on trade time_ms) so only trades not already recorded are appended.
This commit is contained in:
parent
8b73500d22
commit
9528d69a42
@ -261,9 +261,12 @@ class SymbolState:
|
||||
cutoff = now_ms - 15 * 60 * 1000
|
||||
while self.recent_large_trades and self.recent_large_trades[0][0] < cutoff:
|
||||
self.recent_large_trades.popleft()
|
||||
# Only append trades not already recorded (avoid duplicate entries)
|
||||
seen = set(t[0] for t in self.recent_large_trades) # time_ms作为去重key
|
||||
for t in self.win_fast.trades:
|
||||
if t[1] >= p99 and t[0] > cutoff:
|
||||
if t[1] >= p99 and t[0] > cutoff and t[0] not in seen:
|
||||
self.recent_large_trades.append((t[0], t[1], t[3]))
|
||||
seen.add(t[0])
|
||||
|
||||
def evaluate_signal(self, now_ms: int) -> dict:
|
||||
cvd_fast = self.win_fast.cvd
|
||||
|
||||
Loading…
Reference in New Issue
Block a user