fix: resolve critical/important bugs from code review (#1,#2,#4,#5,#6,#8)

- #1: OI division by zero — already fixed (prev_oi == 0.0 guard exists)
- #2: cumulative trade count used max() instead of sum(), breaking ML trigger
- #4: fetch_history API calls now retry 3x with exponential backoff
- #5: parquet upsert now deduplicates timestamps before sort
- #6: record_pnl() is now async with Lock for multi-symbol safety
- #8: exit_price == 0.0 now skips close handling and emits a warning log instead

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
21in7
2026-03-07 03:06:48 +09:00
parent 0a8748913e
commit 60510c026b
5 changed files with 31 additions and 11 deletions

View File

@@ -44,12 +44,23 @@ async def _fetch_klines_with_client(
start_ts = int((datetime.now(timezone.utc) - timedelta(days=days)).timestamp() * 1000)
all_klines = []
while True:
klines = await client.futures_klines(
symbol=symbol,
interval=interval,
startTime=start_ts,
limit=1500,
)
for attempt in range(3):
try:
klines = await client.futures_klines(
symbol=symbol,
interval=interval,
startTime=start_ts,
limit=1500,
)
break
except Exception as e:
if attempt < 2:
wait = 2 ** (attempt + 1)
print(f" [{symbol}] API 오류 ({e}), {wait}초 후 재시도 ({attempt+1}/3)")
await asyncio.sleep(wait)
else:
print(f" [{symbol}] API 3회 실패, 수집 중단: {e}")
raise
if not klines:
break
all_klines.extend(klines)
@@ -311,6 +322,7 @@ def upsert_parquet(path: "Path | str", new_df: pd.DataFrame) -> pd.DataFrame:
if col in existing.columns:
existing[col] = existing[col].fillna(0.0)
existing = existing[~existing.index.duplicated(keep='last')]
return existing.sort_index()

View File

@@ -418,7 +418,7 @@ def generate_report(
for rpath in sorted(rdir.glob("report_*.json")):
try:
prev = json.loads(rpath.read_text())
cumulative = max(cumulative, prev.get("live_trades", {}).get("count", 0))
cumulative += prev.get("live_trades", {}).get("count", 0)
except (json.JSONDecodeError, KeyError):
pass