feat(v3.2): DailyLedger + RiskGate + news_snapshot + backtest_runner

- DailyLedger: 당일 매수 회계 + 연속 손절 카운터 + 매수 신호 점수 한 객체로 집약 (bot.py 정리)
- RiskGate: 테마당 동시 보유 + 노출 비율 상한 검증 (포트폴리오 레벨)
- news_snapshot: 뉴스 SQLite 영구 저장 + 사후 감성 재검증 인프라
- backtest_runner: 전 종목 KIS 일봉 기반 백테스트 (Sharpe/MDD/Calmar)
- bot.py 274 line 정리 (DailyLedger 분리)
- backtest.py 173 line 재작성 (v3.2 next-bar 체결 + 거래세)
- daily_launcher.py 폐기 (warmup_and_restart 통합)
- .gitignore: .claude/ 제외

Co-Authored-By: Claude Opus 4.7 (1M context) <noreply@anthropic.com>
This commit is contained in:
2026-05-16 02:57:26 +09:00
parent 0aebca7ff0
commit 42b91d03cf
12 changed files with 869 additions and 492 deletions

View File

@@ -1,6 +1,23 @@
import time
import requests
import xml.etree.ElementTree as ET
from typing import Optional
def _parse_items(root, max_items):
"""RSS item → [{title, url, pub_date, source}]"""
out = []
for item in root.findall(".//item")[:max_items]:
t = item.find("title")
l = item.find("link")
p = item.find("pubDate")
title = (t.text or "").strip() if t is not None else ""
url = (l.text or "").strip() if l is not None else ""
pub = (p.text or "").strip() if p is not None else ""
if not title:
continue
out.append({"title": title, "url": url, "pub_date": pub, "source": "Google News"})
return out
class NewsCollector:
@@ -11,24 +28,29 @@ class NewsCollector:
try:
resp = requests.get(url, timeout=5)
root = ET.fromstring(resp.content)
items = []
for item in root.findall(".//item")[:5]:
title = item.find("title").text
items.append({"title": title, "source": "Google News"})
return items
return _parse_items(root, 5)
except Exception as e:
print(f"[News] Collection failed: {e}")
return []
class AsyncNewsCollector:
"""비동기 뉴스 수집 + 5분 캐싱"""
"""비동기 뉴스 수집 + 5분 캐싱 + (옵션) 스냅샷 저장"""
def __init__(self):
def __init__(self, snapshot_store=None):
    """Initialize empty news caches.

    snapshot_store: optional store object (NewsSnapshotStore-like, exposing
        save_many()); when provided, collected items are persisted via
        _save_snapshot. None disables snapshot saving.
    """
    self._cache = None
    self._cache_time = 0
    self._cache_ttl = 300  # 5 minutes
    self._stock_cache = {}  # {stock_name: (items, timestamp)}
    self._snap = snapshot_store  # NewsSnapshotStore | None
def _save_snapshot(self, items, query: str, ticker: Optional[str] = None):
if not self._snap or not items:
return
try:
self._snap.save_many(items, query=query, ticker=ticker)
except Exception as e:
print(f"[News] snapshot 저장 실패: {e}")
def get_market_news(self, query="주식 시장"):
"""동기 인터페이스 (하위 호환)"""
@@ -39,6 +61,7 @@ class AsyncNewsCollector:
result = NewsCollector.get_market_news(query)
self._cache = result
self._cache_time = now
self._save_snapshot(result, query=query)
return result
async def get_market_news_async(self, query="주식 시장"):
@@ -54,13 +77,10 @@ class AsyncNewsCollector:
async with session.get(url, timeout=aiohttp.ClientTimeout(total=5)) as resp:
content = await resp.read()
root = ET.fromstring(content)
items = []
for item in root.findall(".//item")[:5]:
title = item.find("title").text
items.append({"title": title, "source": "Google News"})
items = _parse_items(root, 5)
self._cache = items
self._cache_time = now
self._save_snapshot(items, query=query)
return items
except ImportError:
return self.get_market_news(query)
@@ -70,9 +90,10 @@ class AsyncNewsCollector:
return self._cache
return self.get_market_news(query)
async def get_stock_news_async(self, stock_name, max_items=3):
async def get_stock_news_async(self, stock_name, max_items=3, ticker: Optional[str] = None):
"""종목별 뉴스 수집 (5분 캐싱)
stock_name: 종목 이름 (e.g. '삼성전자', 'SK하이닉스')
ticker: 스냅샷 저장 시 종목코드 (옵션)
"""
now = time.time()
cached = self._stock_cache.get(stock_name)
@@ -88,13 +109,9 @@ class AsyncNewsCollector:
async with session.get(url, timeout=aiohttp.ClientTimeout(total=5)) as resp:
content = await resp.read()
root = ET.fromstring(content)
items = []
for item in root.findall(".//item")[:max_items]:
title_el = item.find("title")
if title_el is not None and title_el.text:
items.append({"title": title_el.text, "source": "Google News"})
items = _parse_items(root, max_items)
self._stock_cache[stock_name] = (items, now)
self._save_snapshot(items, query=f"{stock_name} 주가", ticker=ticker)
return items
except Exception as e:
print(f"[News] 종목 뉴스 수집 실패 ({stock_name}): {e}")