Compare commits
10 Commits
55f2fa9cff
...
119ac88e1e
| Author | SHA1 | Date | |
|---|---|---|---|
| 119ac88e1e | |||
| c4cb18a25c | |||
| 50e811c5dd | |||
| 5ec7c2461b | |||
| 5f0fed7f13 | |||
| 070f2de3f1 | |||
| 01ebd2e7d9 | |||
| 7db9869722 | |||
| 97cb38ca7f | |||
| 90c408aa77 |
@@ -119,7 +119,125 @@ class StockAgent(BaseAgent):
|
|||||||
update_task_status(task_id, "failed", {"error": str(e)})
|
update_task_status(task_id, "failed", {"error": str(e)})
|
||||||
await self.transition("idle", f"오류: {e}")
|
await self.transition("idle", f"오류: {e}")
|
||||||
|
|
||||||
|
async def on_screener_schedule(self) -> None:
|
||||||
|
"""KRX 강세주 스크리너 자동 잡 (평일 16:30 KST).
|
||||||
|
|
||||||
|
흐름:
|
||||||
|
1) snapshot/refresh — 일봉 갱신 (실패해도 진행, 경고 로그)
|
||||||
|
2) screener/run mode='auto' — 실행 + 결과 영구화 + telegram_payload 응답
|
||||||
|
3) status=='skipped_holiday' → 종료 (텔레그램 미발신)
|
||||||
|
4) status=='success' → telegram_payload.text 를 parse_mode 그대로 전송
|
||||||
|
5) 예외/실패 → 운영자에게 별도 텔레그램 알림 (HTML)
|
||||||
|
"""
|
||||||
|
if self.state not in ("idle", "break"):
|
||||||
|
return
|
||||||
|
|
||||||
|
task_id = create_task(self.agent_id, "screener_run", {"mode": "auto"})
|
||||||
|
await self.transition("working", "스크리너 스냅샷 갱신 중...", task_id)
|
||||||
|
|
||||||
|
try:
|
||||||
|
# 1) 스냅샷 갱신 — 실패해도 기존 일봉 데이터로 진행
|
||||||
|
try:
|
||||||
|
snap = await service_proxy.refresh_screener_snapshot()
|
||||||
|
add_log(
|
||||||
|
self.agent_id,
|
||||||
|
f"snapshot refreshed: status={snap.get('status', '?')}",
|
||||||
|
"info", task_id,
|
||||||
|
)
|
||||||
|
except Exception as e:
|
||||||
|
add_log(
|
||||||
|
self.agent_id,
|
||||||
|
f"스냅샷 갱신 실패 (기존 데이터로 진행): {e}",
|
||||||
|
"warning", task_id,
|
||||||
|
)
|
||||||
|
|
||||||
|
await self.transition("working", "스크리너 실행 중...")
|
||||||
|
|
||||||
|
# 2) 스크리너 실행
|
||||||
|
body = await service_proxy.run_stock_screener(mode="auto")
|
||||||
|
status = body.get("status")
|
||||||
|
asof = body.get("asof")
|
||||||
|
|
||||||
|
# 3) 공휴일 — 종료
|
||||||
|
if status == "skipped_holiday":
|
||||||
|
update_task_status(task_id, "succeeded", {
|
||||||
|
"status": status,
|
||||||
|
"asof": asof,
|
||||||
|
"telegram_sent": False,
|
||||||
|
})
|
||||||
|
add_log(self.agent_id, f"스크리너 건너뜀 (휴일): {asof}", "info", task_id)
|
||||||
|
await self.transition("idle", "휴일 — 스크리너 건너뜀")
|
||||||
|
return
|
||||||
|
|
||||||
|
# 4) 성공 → 텔레그램 전송
|
||||||
|
if status == "success":
|
||||||
|
payload = body.get("telegram_payload") or {}
|
||||||
|
text = payload.get("text") or ""
|
||||||
|
parse_mode = payload.get("parse_mode", "MarkdownV2")
|
||||||
|
|
||||||
|
if not text:
|
||||||
|
raise RuntimeError("telegram_payload.text 누락")
|
||||||
|
|
||||||
|
await self.transition("reporting", "스크리너 결과 전송 중...")
|
||||||
|
|
||||||
|
from ..telegram.messaging import send_raw
|
||||||
|
tg = await send_raw(text, parse_mode=parse_mode)
|
||||||
|
|
||||||
|
update_task_status(task_id, "succeeded", {
|
||||||
|
"status": status,
|
||||||
|
"asof": asof,
|
||||||
|
"run_id": body.get("run_id"),
|
||||||
|
"survivors_count": body.get("survivors_count"),
|
||||||
|
"telegram_sent": tg.get("ok", False),
|
||||||
|
"telegram_message_id": tg.get("message_id"),
|
||||||
|
})
|
||||||
|
|
||||||
|
if not tg.get("ok"):
|
||||||
|
desc = tg.get("description") or "unknown"
|
||||||
|
code = tg.get("error_code")
|
||||||
|
add_log(
|
||||||
|
self.agent_id,
|
||||||
|
f"Screener telegram send failed: [{code}] {desc}",
|
||||||
|
"warning", task_id,
|
||||||
|
)
|
||||||
|
if self._ws_manager:
|
||||||
|
await self._ws_manager.send_notification(
|
||||||
|
self.agent_id, "telegram_failed", task_id,
|
||||||
|
"스크리너 텔레그램 전송 실패",
|
||||||
|
)
|
||||||
|
|
||||||
|
await self.transition("idle", "스크리너 완료")
|
||||||
|
return
|
||||||
|
|
||||||
|
# 5) 기타 status — failed 취급
|
||||||
|
raise RuntimeError(f"unexpected screener status: {status}")
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
err_msg = str(e)
|
||||||
|
add_log(self.agent_id, f"Screener job failed: {err_msg}", "error", task_id)
|
||||||
|
update_task_status(task_id, "failed", {"error": err_msg})
|
||||||
|
|
||||||
|
# 운영자 알림 — 기본 HTML parse_mode 사용
|
||||||
|
try:
|
||||||
|
from ..telegram.messaging import send_raw
|
||||||
|
await send_raw(
|
||||||
|
f"⚠️ <b>KRX 스크리너 실패</b>\n"
|
||||||
|
f"<code>{html.escape(err_msg)[:500]}</code>"
|
||||||
|
)
|
||||||
|
except Exception as notify_err:
|
||||||
|
add_log(
|
||||||
|
self.agent_id,
|
||||||
|
f"operator notify failed: {notify_err}",
|
||||||
|
"warning", task_id,
|
||||||
|
)
|
||||||
|
|
||||||
|
await self.transition("idle", f"스크리너 오류: {err_msg[:80]}")
|
||||||
|
|
||||||
async def on_command(self, command: str, params: dict) -> dict:
|
async def on_command(self, command: str, params: dict) -> dict:
|
||||||
|
if command == "run_screener":
|
||||||
|
await self.on_screener_schedule()
|
||||||
|
return {"ok": True, "message": "스크리너 실행 트리거 완료"}
|
||||||
|
|
||||||
if command == "test_telegram":
|
if command == "test_telegram":
|
||||||
from ..telegram import send_agent_message
|
from ..telegram import send_agent_message
|
||||||
result = await send_agent_message(
|
result = await send_agent_message(
|
||||||
|
|||||||
@@ -14,6 +14,11 @@ async def _run_stock_schedule():
|
|||||||
if agent:
|
if agent:
|
||||||
await agent.on_schedule()
|
await agent.on_schedule()
|
||||||
|
|
||||||
|
async def _run_stock_screener():
|
||||||
|
agent = AGENT_REGISTRY.get("stock")
|
||||||
|
if agent:
|
||||||
|
await agent.on_screener_schedule()
|
||||||
|
|
||||||
async def _run_blog_schedule():
|
async def _run_blog_schedule():
|
||||||
agent = AGENT_REGISTRY.get("blog")
|
agent = AGENT_REGISTRY.get("blog")
|
||||||
if agent:
|
if agent:
|
||||||
@@ -41,6 +46,14 @@ async def _poll_pipelines():
|
|||||||
|
|
||||||
def init_scheduler():
|
def init_scheduler():
|
||||||
scheduler.add_job(_run_stock_schedule, "cron", hour=7, minute=30, id="stock_news")
|
scheduler.add_job(_run_stock_schedule, "cron", hour=7, minute=30, id="stock_news")
|
||||||
|
scheduler.add_job(
|
||||||
|
_run_stock_screener,
|
||||||
|
"cron",
|
||||||
|
day_of_week="mon-fri",
|
||||||
|
hour=16,
|
||||||
|
minute=30,
|
||||||
|
id="stock_screener",
|
||||||
|
)
|
||||||
scheduler.add_job(_run_blog_schedule, "cron", hour=10, minute=0, id="blog_pipeline")
|
scheduler.add_job(_run_blog_schedule, "cron", hour=10, minute=0, id="blog_pipeline")
|
||||||
scheduler.add_job(_run_lotto_schedule, "cron", day_of_week="mon", hour=9, minute=0, id="lotto_curate")
|
scheduler.add_job(_run_lotto_schedule, "cron", day_of_week="mon", hour=9, minute=0, id="lotto_curate")
|
||||||
scheduler.add_job(_run_youtube_research, "cron", hour=9, minute=0, id="youtube_research")
|
scheduler.add_job(_run_youtube_research, "cron", hour=9, minute=0, id="youtube_research")
|
||||||
|
|||||||
@@ -32,6 +32,34 @@ async def summarize_stock_news(limit: int = 15) -> Dict[str, Any]:
|
|||||||
return resp.json()
|
return resp.json()
|
||||||
|
|
||||||
|
|
||||||
|
async def refresh_screener_snapshot() -> Dict[str, Any]:
|
||||||
|
"""stock-lab의 KRX 일봉 스냅샷 갱신 (스크리너 실행 전 호출).
|
||||||
|
|
||||||
|
네이버 금융 일괄 다운로드라 보통 30~120s, 여유있게 180s.
|
||||||
|
"""
|
||||||
|
async with httpx.AsyncClient(timeout=180.0) as client:
|
||||||
|
resp = await client.post(f"{STOCK_LAB_URL}/api/stock/screener/snapshot/refresh")
|
||||||
|
resp.raise_for_status()
|
||||||
|
return resp.json()
|
||||||
|
|
||||||
|
|
||||||
|
async def run_stock_screener(mode: str = "auto") -> Dict[str, Any]:
|
||||||
|
"""stock-lab의 스크리너 실행.
|
||||||
|
|
||||||
|
반환 status:
|
||||||
|
- 'skipped_holiday': 공휴일/주말 — telegram_payload 없음
|
||||||
|
- 'success': telegram_payload 동봉
|
||||||
|
엔진 자체는 수 초 내 끝나지만, 컨텍스트 로드+200종목 처리 여유 180s.
|
||||||
|
"""
|
||||||
|
async with httpx.AsyncClient(timeout=180.0) as client:
|
||||||
|
resp = await client.post(
|
||||||
|
f"{STOCK_LAB_URL}/api/stock/screener/run",
|
||||||
|
json={"mode": mode},
|
||||||
|
)
|
||||||
|
resp.raise_for_status()
|
||||||
|
return resp.json()
|
||||||
|
|
||||||
|
|
||||||
async def scrape_stock_news() -> Dict[str, Any]:
|
async def scrape_stock_news() -> Dict[str, Any]:
|
||||||
"""stock-lab의 수동 뉴스 스크랩 트리거 — DB에 최신 뉴스 저장.
|
"""stock-lab의 수동 뉴스 스크랩 트리거 — DB에 최신 뉴스 저장.
|
||||||
|
|
||||||
|
|||||||
@@ -8,14 +8,22 @@ from .client import _enabled, api_call
|
|||||||
from .formatter import MessageKind, format_agent_message
|
from .formatter import MessageKind, format_agent_message
|
||||||
|
|
||||||
|
|
||||||
async def send_raw(text: str, reply_markup: Optional[dict] = None, chat_id: Optional[str] = None) -> dict:
|
async def send_raw(
|
||||||
"""가장 저수준. 원문 텍스트 그대로 전송. chat_id 생략 시 기본 TELEGRAM_CHAT_ID로."""
|
text: str,
|
||||||
|
reply_markup: Optional[dict] = None,
|
||||||
|
chat_id: Optional[str] = None,
|
||||||
|
parse_mode: str = "HTML",
|
||||||
|
) -> dict:
|
||||||
|
"""가장 저수준. 원문 텍스트 그대로 전송. chat_id 생략 시 기본 TELEGRAM_CHAT_ID로.
|
||||||
|
|
||||||
|
parse_mode: 기본 'HTML'. MarkdownV2 페이로드(예: 스크리너) 전송 시 명시 지정.
|
||||||
|
"""
|
||||||
if not _enabled():
|
if not _enabled():
|
||||||
return {"ok": False, "message_id": None}
|
return {"ok": False, "message_id": None}
|
||||||
payload = {
|
payload = {
|
||||||
"chat_id": chat_id or TELEGRAM_CHAT_ID,
|
"chat_id": chat_id or TELEGRAM_CHAT_ID,
|
||||||
"text": text,
|
"text": text,
|
||||||
"parse_mode": "HTML",
|
"parse_mode": parse_mode,
|
||||||
}
|
}
|
||||||
if reply_markup:
|
if reply_markup:
|
||||||
payload["reply_markup"] = reply_markup
|
payload["reply_markup"] = reply_markup
|
||||||
|
|||||||
177
agent-office/tests/test_stock_screener_job.py
Normal file
177
agent-office/tests/test_stock_screener_job.py
Normal file
@@ -0,0 +1,177 @@
|
|||||||
|
"""StockAgent.on_screener_schedule — 평일 16:30 KST 자동 잡 단위 테스트.
|
||||||
|
|
||||||
|
stock-lab HTTP 호출은 service_proxy mock, 텔레그램은 messaging.send_raw mock.
|
||||||
|
"""
|
||||||
|
import os
|
||||||
|
import sys
|
||||||
|
import tempfile
|
||||||
|
|
||||||
|
_fd, _TMP = tempfile.mkstemp(suffix=".db")
|
||||||
|
os.close(_fd)
|
||||||
|
os.unlink(_TMP)
|
||||||
|
os.environ["AGENT_OFFICE_DB_PATH"] = _TMP
|
||||||
|
|
||||||
|
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
|
||||||
|
|
||||||
|
|
||||||
|
import asyncio
|
||||||
|
from unittest.mock import AsyncMock, patch
|
||||||
|
import pytest
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.fixture(autouse=True)
|
||||||
|
def _init_db():
|
||||||
|
import gc
|
||||||
|
gc.collect()
|
||||||
|
if os.path.exists(_TMP):
|
||||||
|
os.remove(_TMP)
|
||||||
|
from app.db import init_db
|
||||||
|
init_db()
|
||||||
|
yield
|
||||||
|
gc.collect()
|
||||||
|
|
||||||
|
|
||||||
|
def _success_body(asof="2026-05-12"):
|
||||||
|
return {
|
||||||
|
"asof": asof,
|
||||||
|
"mode": "auto",
|
||||||
|
"status": "success",
|
||||||
|
"run_id": 42,
|
||||||
|
"survivors_count": 600,
|
||||||
|
"top_n": 20,
|
||||||
|
"results": [],
|
||||||
|
"telegram_payload": {
|
||||||
|
"chat_target": "default",
|
||||||
|
"parse_mode": "MarkdownV2",
|
||||||
|
"text": "*KRX 강세주 스크리너* test body",
|
||||||
|
},
|
||||||
|
"warnings": [],
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
def _holiday_body(asof="2026-05-05"):
|
||||||
|
return {
|
||||||
|
"asof": asof,
|
||||||
|
"mode": "auto",
|
||||||
|
"status": "skipped_holiday",
|
||||||
|
"run_id": None,
|
||||||
|
"survivors_count": None,
|
||||||
|
"top_n": 0,
|
||||||
|
"results": [],
|
||||||
|
"telegram_payload": None,
|
||||||
|
"warnings": [f"{asof} is a holiday — skipped"],
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
def test_screener_success_sends_markdownv2_telegram():
|
||||||
|
from app.agents.stock import StockAgent
|
||||||
|
from app import service_proxy
|
||||||
|
from app.telegram import messaging
|
||||||
|
|
||||||
|
fake_snap = AsyncMock(return_value={"status": "ok"})
|
||||||
|
fake_run = AsyncMock(return_value=_success_body())
|
||||||
|
fake_send = AsyncMock(return_value={"ok": True, "message_id": 7777})
|
||||||
|
|
||||||
|
with patch.object(service_proxy, "refresh_screener_snapshot", fake_snap), \
|
||||||
|
patch.object(service_proxy, "run_stock_screener", fake_run), \
|
||||||
|
patch.object(messaging, "send_raw", fake_send):
|
||||||
|
agent = StockAgent()
|
||||||
|
asyncio.run(agent.on_screener_schedule())
|
||||||
|
|
||||||
|
fake_snap.assert_awaited_once()
|
||||||
|
fake_run.assert_awaited_once_with(mode="auto")
|
||||||
|
fake_send.assert_awaited_once()
|
||||||
|
args, kwargs = fake_send.call_args
|
||||||
|
# 첫 인자(text) 또는 kwargs로 전달
|
||||||
|
text = args[0] if args else kwargs.get("text")
|
||||||
|
assert "KRX 강세주 스크리너" in text
|
||||||
|
assert kwargs.get("parse_mode") == "MarkdownV2"
|
||||||
|
assert agent.state == "idle"
|
||||||
|
|
||||||
|
|
||||||
|
def test_screener_holiday_skips_telegram():
|
||||||
|
from app.agents.stock import StockAgent
|
||||||
|
from app import service_proxy
|
||||||
|
from app.telegram import messaging
|
||||||
|
|
||||||
|
fake_snap = AsyncMock(return_value={"status": "skipped_weekend"})
|
||||||
|
fake_run = AsyncMock(return_value=_holiday_body())
|
||||||
|
fake_send = AsyncMock(return_value={"ok": True, "message_id": 1})
|
||||||
|
|
||||||
|
with patch.object(service_proxy, "refresh_screener_snapshot", fake_snap), \
|
||||||
|
patch.object(service_proxy, "run_stock_screener", fake_run), \
|
||||||
|
patch.object(messaging, "send_raw", fake_send):
|
||||||
|
agent = StockAgent()
|
||||||
|
asyncio.run(agent.on_screener_schedule())
|
||||||
|
|
||||||
|
fake_run.assert_awaited_once()
|
||||||
|
# 휴일이면 텔레그램 미발신
|
||||||
|
fake_send.assert_not_awaited()
|
||||||
|
assert agent.state == "idle"
|
||||||
|
|
||||||
|
|
||||||
|
def test_screener_snapshot_failure_still_runs_screener():
|
||||||
|
"""스냅샷 실패는 경고만 남기고 screener 호출은 계속됨."""
|
||||||
|
from app.agents.stock import StockAgent
|
||||||
|
from app import service_proxy
|
||||||
|
from app.telegram import messaging
|
||||||
|
|
||||||
|
fake_snap = AsyncMock(side_effect=RuntimeError("snapshot upstream down"))
|
||||||
|
fake_run = AsyncMock(return_value=_success_body())
|
||||||
|
fake_send = AsyncMock(return_value={"ok": True, "message_id": 8888})
|
||||||
|
|
||||||
|
with patch.object(service_proxy, "refresh_screener_snapshot", fake_snap), \
|
||||||
|
patch.object(service_proxy, "run_stock_screener", fake_run), \
|
||||||
|
patch.object(messaging, "send_raw", fake_send):
|
||||||
|
agent = StockAgent()
|
||||||
|
asyncio.run(agent.on_screener_schedule())
|
||||||
|
|
||||||
|
fake_snap.assert_awaited_once()
|
||||||
|
fake_run.assert_awaited_once_with(mode="auto")
|
||||||
|
fake_send.assert_awaited_once()
|
||||||
|
|
||||||
|
|
||||||
|
def test_screener_run_failure_notifies_operator():
|
||||||
|
"""screener/run 실패 시 운영자 알림 텔레그램 발송."""
|
||||||
|
from app.agents.stock import StockAgent
|
||||||
|
from app import service_proxy
|
||||||
|
from app.telegram import messaging
|
||||||
|
|
||||||
|
fake_snap = AsyncMock(return_value={"status": "ok"})
|
||||||
|
fake_run = AsyncMock(side_effect=RuntimeError("stock-lab 500"))
|
||||||
|
fake_send = AsyncMock(return_value={"ok": True, "message_id": 1})
|
||||||
|
|
||||||
|
with patch.object(service_proxy, "refresh_screener_snapshot", fake_snap), \
|
||||||
|
patch.object(service_proxy, "run_stock_screener", fake_run), \
|
||||||
|
patch.object(messaging, "send_raw", fake_send):
|
||||||
|
agent = StockAgent()
|
||||||
|
asyncio.run(agent.on_screener_schedule())
|
||||||
|
|
||||||
|
# 운영자 알림 1회는 호출
|
||||||
|
assert fake_send.await_count == 1
|
||||||
|
args, kwargs = fake_send.call_args
|
||||||
|
text = args[0] if args else kwargs.get("text")
|
||||||
|
assert "스크리너 실패" in text
|
||||||
|
assert agent.state == "idle"
|
||||||
|
|
||||||
|
|
||||||
|
def test_screener_unexpected_status_treated_as_failure():
|
||||||
|
from app.agents.stock import StockAgent
|
||||||
|
from app import service_proxy
|
||||||
|
from app.telegram import messaging
|
||||||
|
|
||||||
|
fake_snap = AsyncMock(return_value={"status": "ok"})
|
||||||
|
fake_run = AsyncMock(return_value={"status": "weird", "asof": "2026-05-12"})
|
||||||
|
fake_send = AsyncMock(return_value={"ok": True, "message_id": 1})
|
||||||
|
|
||||||
|
with patch.object(service_proxy, "refresh_screener_snapshot", fake_snap), \
|
||||||
|
patch.object(service_proxy, "run_stock_screener", fake_run), \
|
||||||
|
patch.object(messaging, "send_raw", fake_send):
|
||||||
|
agent = StockAgent()
|
||||||
|
asyncio.run(agent.on_screener_schedule())
|
||||||
|
|
||||||
|
# 운영자 알림 1회 + screener payload 미발송
|
||||||
|
assert fake_send.await_count == 1
|
||||||
|
args, kwargs = fake_send.call_args
|
||||||
|
text = args[0] if args else kwargs.get("text")
|
||||||
|
assert "스크리너 실패" in text
|
||||||
@@ -5,11 +5,14 @@ from typing import List, Dict, Any, Optional
|
|||||||
|
|
||||||
from app.screener.schema import ensure_screener_schema
|
from app.screener.schema import ensure_screener_schema
|
||||||
|
|
||||||
DB_PATH = "/app/data/stock.db"
|
DB_PATH = os.environ.get("STOCK_DB_PATH", "/app/data/stock.db")
|
||||||
|
|
||||||
def _conn() -> sqlite3.Connection:
|
def _conn() -> sqlite3.Connection:
|
||||||
os.makedirs(os.path.dirname(DB_PATH), exist_ok=True)
|
db_path = os.environ.get("STOCK_DB_PATH", DB_PATH)
|
||||||
conn = sqlite3.connect(DB_PATH)
|
parent = os.path.dirname(db_path)
|
||||||
|
if parent:
|
||||||
|
os.makedirs(parent, exist_ok=True)
|
||||||
|
conn = sqlite3.connect(db_path)
|
||||||
conn.row_factory = sqlite3.Row
|
conn.row_factory = sqlite3.Row
|
||||||
return conn
|
return conn
|
||||||
|
|
||||||
|
|||||||
@@ -27,6 +27,10 @@ from .ai_summarizer import summarize_news, OllamaError
|
|||||||
|
|
||||||
app = FastAPI()
|
app = FastAPI()
|
||||||
|
|
||||||
|
# Screener 라우터 등록
|
||||||
|
from app.screener.router import router as screener_router
|
||||||
|
app.include_router(screener_router)
|
||||||
|
|
||||||
# CORS 설정 (프론트엔드 접근 허용)
|
# CORS 설정 (프론트엔드 접근 허용)
|
||||||
_cors_origins = os.getenv("CORS_ALLOW_ORIGINS", "http://localhost:3007,http://localhost:8080").split(",")
|
_cors_origins = os.getenv("CORS_ALLOW_ORIGINS", "http://localhost:3007,http://localhost:8080").split(",")
|
||||||
app.add_middleware(
|
app.add_middleware(
|
||||||
|
|||||||
@@ -3,8 +3,10 @@
|
|||||||
See docs/superpowers/specs/2026-05-12-stock-screener-board-design.md
|
See docs/superpowers/specs/2026-05-12-stock-screener-board-design.md
|
||||||
"""
|
"""
|
||||||
|
|
||||||
# Phase 2 완료 후 활성화:
|
from .engine import Screener, ScreenContext, ScreenerResult
|
||||||
# from .engine import Screener, ScreenContext, ScreenerResult
|
from .registry import NODE_REGISTRY, GATE_REGISTRY
|
||||||
# from .registry import NODE_REGISTRY, GATE_REGISTRY
|
|
||||||
|
|
||||||
__all__ = []
|
__all__ = [
|
||||||
|
"Screener", "ScreenContext", "ScreenerResult",
|
||||||
|
"NODE_REGISTRY", "GATE_REGISTRY",
|
||||||
|
]
|
||||||
|
|||||||
@@ -67,3 +67,95 @@ class ScreenContext:
|
|||||||
if self.prices.empty:
|
if self.prices.empty:
|
||||||
return pd.Series(dtype=float)
|
return pd.Series(dtype=float)
|
||||||
return self.prices.sort_values("date").groupby("ticker")["high"].last()
|
return self.prices.sort_values("date").groupby("ticker")["high"].last()
|
||||||
|
|
||||||
|
|
||||||
|
# ---- combine + Screener (Phase 2) ----
|
||||||
|
|
||||||
|
from . import position_sizer as _ps
|
||||||
|
|
||||||
|
|
||||||
|
def combine(scores: dict, weights: dict) -> pd.Series:
|
||||||
|
"""Weighted average across score nodes. ValueError if all weights = 0."""
|
||||||
|
active = {k: w for k, w in weights.items() if w > 0 and k in scores}
|
||||||
|
if not active:
|
||||||
|
raise ValueError("no active score nodes (all weights = 0)")
|
||||||
|
|
||||||
|
df = pd.DataFrame({k: scores[k] for k in active})
|
||||||
|
w = pd.Series(active)
|
||||||
|
weighted = (df.fillna(0).multiply(w, axis=1)).sum(axis=1) / w.sum()
|
||||||
|
return weighted
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass
|
||||||
|
class ScreenerResult:
|
||||||
|
asof: dt.date
|
||||||
|
survivors_count: int
|
||||||
|
scores: dict # node name → pd.Series
|
||||||
|
weights: dict
|
||||||
|
ranked: pd.Series # ticker → total_score (sorted desc, head=top_n)
|
||||||
|
rows: list # list of dicts (for serialization)
|
||||||
|
warnings: list
|
||||||
|
|
||||||
|
|
||||||
|
class Screener:
|
||||||
|
def __init__(self, gate, score_nodes, weights: dict, node_params: dict,
|
||||||
|
gate_params: dict, top_n: int = 20, sizer_params: dict = None):
|
||||||
|
self.gate = gate
|
||||||
|
self.score_nodes = score_nodes
|
||||||
|
self.weights = weights
|
||||||
|
self.node_params = node_params
|
||||||
|
self.gate_params = gate_params
|
||||||
|
self.top_n = top_n
|
||||||
|
self.sizer_params = sizer_params or {"atr_window": 14, "atr_stop_mult": 2.0, "rr_ratio": 2.0}
|
||||||
|
|
||||||
|
def run(self, ctx: ScreenContext) -> ScreenerResult:
|
||||||
|
warnings: list = []
|
||||||
|
|
||||||
|
survivors = self.gate.filter(ctx, self.gate_params)
|
||||||
|
if len(survivors) == 0:
|
||||||
|
raise ValueError("no survivors after hygiene gate")
|
||||||
|
if len(survivors) < 100:
|
||||||
|
warnings.append(f"survivors_count={len(survivors)} < 100 — 백분위 정규화 신뢰도 낮음")
|
||||||
|
|
||||||
|
scoped = ctx.restrict(survivors)
|
||||||
|
scores: dict = {}
|
||||||
|
for n in self.score_nodes:
|
||||||
|
w = self.weights.get(n.name, 0)
|
||||||
|
if w <= 0:
|
||||||
|
continue
|
||||||
|
try:
|
||||||
|
scores[n.name] = n.compute(scoped, self.node_params.get(n.name, {}))
|
||||||
|
except Exception as e:
|
||||||
|
warnings.append(f"node '{n.name}' failed: {e}")
|
||||||
|
scores[n.name] = pd.Series(0.0, index=scoped.master.index)
|
||||||
|
|
||||||
|
total = combine(scores, self.weights)
|
||||||
|
ranked = total.sort_values(ascending=False).head(self.top_n)
|
||||||
|
|
||||||
|
sizing = _ps.plan_positions(scoped, list(ranked.index), self.sizer_params)
|
||||||
|
latest_close = scoped.latest_close()
|
||||||
|
|
||||||
|
rows = []
|
||||||
|
for rank_idx, ticker in enumerate(ranked.index, start=1):
|
||||||
|
s = sizing.get(ticker, {})
|
||||||
|
row = {
|
||||||
|
"rank": rank_idx,
|
||||||
|
"ticker": ticker,
|
||||||
|
"name": str(scoped.master.loc[ticker, "name"]),
|
||||||
|
"total_score": float(ranked.loc[ticker]),
|
||||||
|
"scores": {k: float(v.get(ticker, 0.0)) for k, v in scores.items()},
|
||||||
|
"close": int(latest_close.get(ticker, 0)),
|
||||||
|
"market_cap": int(scoped.master.loc[ticker, "market_cap"] or 0),
|
||||||
|
"entry_price": s.get("entry_price"),
|
||||||
|
"stop_price": s.get("stop_price"),
|
||||||
|
"target_price": s.get("target_price"),
|
||||||
|
"atr14": s.get("atr14"),
|
||||||
|
"r_pct": s.get("r_pct"),
|
||||||
|
}
|
||||||
|
rows.append(row)
|
||||||
|
|
||||||
|
return ScreenerResult(
|
||||||
|
asof=ctx.asof, survivors_count=len(survivors),
|
||||||
|
scores=scores, weights=self.weights,
|
||||||
|
ranked=ranked, rows=rows, warnings=warnings,
|
||||||
|
)
|
||||||
|
|||||||
40
stock-lab/app/screener/nodes/vcp_lite.py
Normal file
40
stock-lab/app/screener/nodes/vcp_lite.py
Normal file
@@ -0,0 +1,40 @@
|
|||||||
|
"""VCP-lite — 단기/장기 일중 변동성 비율 기반 수축률."""
|
||||||
|
|
||||||
|
import pandas as pd
|
||||||
|
|
||||||
|
from .base import ScoreNode, percentile_rank
|
||||||
|
|
||||||
|
|
||||||
|
class VcpLite(ScoreNode):
|
||||||
|
name = "vcp_lite"
|
||||||
|
label = "VCP-lite (변동성 수축)"
|
||||||
|
default_params = {"short_window": 40, "long_window": 252}
|
||||||
|
param_schema = {
|
||||||
|
"type": "object",
|
||||||
|
"properties": {
|
||||||
|
"short_window": {"type": "integer", "minimum": 10, "maximum": 120, "default": 40},
|
||||||
|
"long_window": {"type": "integer", "minimum": 60, "maximum": 504, "default": 252},
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
def compute(self, ctx, params: dict) -> pd.Series:
|
||||||
|
short_w = int(params.get("short_window", 40))
|
||||||
|
long_w = int(params.get("long_window", 252))
|
||||||
|
prices = ctx.prices
|
||||||
|
if prices.empty:
|
||||||
|
return pd.Series(dtype=float)
|
||||||
|
|
||||||
|
ordered = prices.sort_values("date").copy()
|
||||||
|
ordered["range_pct"] = (ordered["high"] - ordered["low"]) / ordered["close"]
|
||||||
|
|
||||||
|
def _ratio(s: pd.Series) -> float:
|
||||||
|
if len(s) < long_w:
|
||||||
|
return float("nan")
|
||||||
|
short_vol = s.tail(short_w).mean()
|
||||||
|
long_vol = s.tail(long_w).mean()
|
||||||
|
if long_vol == 0 or pd.isna(long_vol):
|
||||||
|
return float("nan")
|
||||||
|
return 1 - (short_vol / long_vol)
|
||||||
|
|
||||||
|
raw = ordered.groupby("ticker", group_keys=False)["range_pct"].apply(_ratio)
|
||||||
|
return percentile_rank(raw).fillna(50.0)
|
||||||
51
stock-lab/app/screener/position_sizer.py
Normal file
51
stock-lab/app/screener/position_sizer.py
Normal file
@@ -0,0 +1,51 @@
|
|||||||
|
"""ATR Wilder smoothing + entry/stop/target 계산."""
|
||||||
|
|
||||||
|
import pandas as pd
|
||||||
|
|
||||||
|
|
||||||
|
def compute_atr_wilder(df_one_ticker: pd.DataFrame, window: int = 14) -> float:
|
||||||
|
"""단일 종목 DataFrame(date·open·high·low·close)에 대해 Wilder ATR 마지막 값."""
|
||||||
|
g = df_one_ticker.sort_values("date").copy()
|
||||||
|
high = g["high"].astype(float)
|
||||||
|
low = g["low"].astype(float)
|
||||||
|
close = g["close"].astype(float)
|
||||||
|
prev_close = close.shift(1)
|
||||||
|
tr = pd.concat([
|
||||||
|
(high - low),
|
||||||
|
(high - prev_close).abs(),
|
||||||
|
(low - prev_close).abs(),
|
||||||
|
], axis=1).max(axis=1)
|
||||||
|
atr = tr.ewm(alpha=1 / window, adjust=False).mean()
|
||||||
|
return float(atr.iloc[-1])
|
||||||
|
|
||||||
|
|
||||||
|
def round_won(x: float) -> int:
|
||||||
|
return int(round(x))
|
||||||
|
|
||||||
|
|
||||||
|
def plan_positions(ctx, tickers: list, params: dict) -> dict:
|
||||||
|
"""각 ticker 에 대해 entry/stop/target/atr14 반환."""
|
||||||
|
atr_window = int(params.get("atr_window", 14))
|
||||||
|
stop_mult = float(params.get("atr_stop_mult", 2.0))
|
||||||
|
rr = float(params.get("rr_ratio", 2.0))
|
||||||
|
|
||||||
|
prices = ctx.prices.sort_values("date")
|
||||||
|
out: dict = {}
|
||||||
|
for t in tickers:
|
||||||
|
sub = prices[prices["ticker"] == t]
|
||||||
|
if sub.empty:
|
||||||
|
continue
|
||||||
|
close = float(sub["close"].iloc[-1])
|
||||||
|
atr14 = compute_atr_wilder(sub, window=atr_window)
|
||||||
|
entry = round_won(close * 1.005)
|
||||||
|
stop = round_won(close - stop_mult * atr14)
|
||||||
|
target = round_won(entry + rr * (entry - stop))
|
||||||
|
r_pct = (entry - stop) / entry * 100 if entry else 0.0
|
||||||
|
out[t] = {
|
||||||
|
"entry_price": entry,
|
||||||
|
"stop_price": stop,
|
||||||
|
"target_price": target,
|
||||||
|
"atr14": atr14,
|
||||||
|
"r_pct": r_pct,
|
||||||
|
}
|
||||||
|
return out
|
||||||
24
stock-lab/app/screener/registry.py
Normal file
24
stock-lab/app/screener/registry.py
Normal file
@@ -0,0 +1,24 @@
|
|||||||
|
"""Registry of node classes (single source of truth for /nodes endpoint)."""
|
||||||
|
|
||||||
|
from .nodes.hygiene import HygieneGate
|
||||||
|
from .nodes.foreign_buy import ForeignBuy
|
||||||
|
from .nodes.volume_surge import VolumeSurge
|
||||||
|
from .nodes.momentum import Momentum20
|
||||||
|
from .nodes.high52w import High52WProximity
|
||||||
|
from .nodes.rs_rating import RsRating
|
||||||
|
from .nodes.ma_alignment import MaAlignment
|
||||||
|
from .nodes.vcp_lite import VcpLite
|
||||||
|
|
||||||
|
NODE_REGISTRY: dict = {
|
||||||
|
"foreign_buy": ForeignBuy,
|
||||||
|
"volume_surge": VolumeSurge,
|
||||||
|
"momentum": Momentum20,
|
||||||
|
"high52w": High52WProximity,
|
||||||
|
"rs_rating": RsRating,
|
||||||
|
"ma_alignment": MaAlignment,
|
||||||
|
"vcp_lite": VcpLite,
|
||||||
|
}
|
||||||
|
|
||||||
|
GATE_REGISTRY: dict = {
|
||||||
|
"hygiene": HygieneGate,
|
||||||
|
}
|
||||||
310
stock-lab/app/screener/router.py
Normal file
310
stock-lab/app/screener/router.py
Normal file
@@ -0,0 +1,310 @@
|
|||||||
|
"""FastAPI router for /api/stock/screener/*"""
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import datetime as dt
|
||||||
|
import json
|
||||||
|
import os
|
||||||
|
import sqlite3
|
||||||
|
from typing import Optional
|
||||||
|
|
||||||
|
from fastapi import APIRouter, HTTPException
|
||||||
|
|
||||||
|
from . import schemas
|
||||||
|
from .registry import NODE_REGISTRY, GATE_REGISTRY
|
||||||
|
|
||||||
|
|
||||||
|
router = APIRouter(prefix="/api/stock/screener")
|
||||||
|
|
||||||
|
|
||||||
|
import json as _json
|
||||||
|
import pathlib as _pathlib
|
||||||
|
|
||||||
|
_HOLIDAYS_CACHE = None
|
||||||
|
|
||||||
|
|
||||||
|
def _holidays():
|
||||||
|
global _HOLIDAYS_CACHE
|
||||||
|
if _HOLIDAYS_CACHE is None:
|
||||||
|
path = _pathlib.Path(__file__).resolve().parent.parent / "holidays.json"
|
||||||
|
try:
|
||||||
|
with path.open(encoding="utf-8") as f:
|
||||||
|
data = _json.load(f)
|
||||||
|
_HOLIDAYS_CACHE = set(data) if isinstance(data, list) else set(data.keys())
|
||||||
|
except FileNotFoundError:
|
||||||
|
_HOLIDAYS_CACHE = set()
|
||||||
|
return _HOLIDAYS_CACHE
|
||||||
|
|
||||||
|
|
||||||
|
def _is_holiday(d: dt.date) -> bool:
|
||||||
|
return d.weekday() >= 5 or d.isoformat() in _holidays()
|
||||||
|
|
||||||
|
|
||||||
|
def _db_path() -> str:
|
||||||
|
return os.environ.get("STOCK_DB_PATH", "/app/data/stock.db")
|
||||||
|
|
||||||
|
|
||||||
|
def _conn() -> sqlite3.Connection:
|
||||||
|
return sqlite3.connect(_db_path())
|
||||||
|
|
||||||
|
|
||||||
|
# ---------- /nodes ----------
|
||||||
|
|
||||||
|
@router.get("/nodes", response_model=schemas.NodesResponse)
|
||||||
|
def get_nodes():
|
||||||
|
score_nodes = [
|
||||||
|
schemas.NodeMeta(
|
||||||
|
name=cls.name, label=cls.label,
|
||||||
|
default_params=cls.default_params, param_schema=cls.param_schema,
|
||||||
|
)
|
||||||
|
for cls in NODE_REGISTRY.values()
|
||||||
|
]
|
||||||
|
gate_nodes = [
|
||||||
|
schemas.NodeMeta(
|
||||||
|
name=cls.name, label=cls.label,
|
||||||
|
default_params=cls.default_params, param_schema=cls.param_schema,
|
||||||
|
)
|
||||||
|
for cls in GATE_REGISTRY.values()
|
||||||
|
]
|
||||||
|
return schemas.NodesResponse(score_nodes=score_nodes, gate_nodes=gate_nodes)
|
||||||
|
|
||||||
|
|
||||||
|
# ---------- /settings ----------
|
||||||
|
|
||||||
|
@router.get("/settings", response_model=schemas.SettingsResponse)
|
||||||
|
def get_settings():
|
||||||
|
with _conn() as c:
|
||||||
|
row = c.execute(
|
||||||
|
"SELECT weights_json, node_params_json, gate_params_json, "
|
||||||
|
"top_n, rr_ratio, atr_window, atr_stop_mult, updated_at "
|
||||||
|
"FROM screener_settings WHERE id=1"
|
||||||
|
).fetchone()
|
||||||
|
if row is None:
|
||||||
|
raise HTTPException(503, "settings not initialized")
|
||||||
|
return schemas.SettingsResponse(
|
||||||
|
weights=json.loads(row[0]),
|
||||||
|
node_params=json.loads(row[1]),
|
||||||
|
gate_params=json.loads(row[2]),
|
||||||
|
top_n=row[3], rr_ratio=row[4], atr_window=row[5], atr_stop_mult=row[6],
|
||||||
|
updated_at=row[7],
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
@router.put("/settings", response_model=schemas.SettingsResponse)
|
||||||
|
def put_settings(body: schemas.SettingsBody):
|
||||||
|
now = dt.datetime.utcnow().isoformat()
|
||||||
|
with _conn() as c:
|
||||||
|
c.execute(
|
||||||
|
"""UPDATE screener_settings SET
|
||||||
|
weights_json=?, node_params_json=?, gate_params_json=?,
|
||||||
|
top_n=?, rr_ratio=?, atr_window=?, atr_stop_mult=?, updated_at=?
|
||||||
|
WHERE id=1""",
|
||||||
|
(
|
||||||
|
json.dumps(body.weights), json.dumps(body.node_params),
|
||||||
|
json.dumps(body.gate_params),
|
||||||
|
body.top_n, body.rr_ratio, body.atr_window, body.atr_stop_mult, now,
|
||||||
|
),
|
||||||
|
)
|
||||||
|
c.commit()
|
||||||
|
return schemas.SettingsResponse(**body.model_dump(), updated_at=now)
|
||||||
|
|
||||||
|
|
||||||
|
# ---------- /run ----------
|
||||||
|
|
||||||
|
from . import telegram as _tg
|
||||||
|
from .engine import Screener, ScreenContext
|
||||||
|
|
||||||
|
|
||||||
|
def _resolve_asof(asof_str, conn: sqlite3.Connection) -> dt.date:
|
||||||
|
if asof_str:
|
||||||
|
return dt.date.fromisoformat(asof_str)
|
||||||
|
row = conn.execute("SELECT max(date) FROM krx_daily_prices").fetchone()
|
||||||
|
if not row or row[0] is None:
|
||||||
|
raise HTTPException(503, "no snapshot available — run /snapshot/refresh first")
|
||||||
|
return dt.date.fromisoformat(row[0])
|
||||||
|
|
||||||
|
|
||||||
|
def _load_settings(conn) -> dict:
|
||||||
|
row = conn.execute(
|
||||||
|
"SELECT weights_json,node_params_json,gate_params_json,top_n,"
|
||||||
|
"rr_ratio,atr_window,atr_stop_mult FROM screener_settings WHERE id=1"
|
||||||
|
).fetchone()
|
||||||
|
return {
|
||||||
|
"weights": json.loads(row[0]),
|
||||||
|
"node_params": json.loads(row[1]),
|
||||||
|
"gate_params": json.loads(row[2]),
|
||||||
|
"top_n": row[3],
|
||||||
|
"rr_ratio": row[4],
|
||||||
|
"atr_window": row[5],
|
||||||
|
"atr_stop_mult": row[6],
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
def _persist_run(conn, asof, mode, weights, node_params, gate_params, top_n,
|
||||||
|
result, started_at, finished_at) -> int:
|
||||||
|
cur = conn.execute(
|
||||||
|
"""INSERT INTO screener_runs (asof,mode,status,started_at,finished_at,
|
||||||
|
weights_json,node_params_json,gate_params_json,top_n,survivors_count,telegram_sent)
|
||||||
|
VALUES (?,?,?,?,?,?,?,?,?,?,0)""",
|
||||||
|
(asof.isoformat(), mode, "success", started_at, finished_at,
|
||||||
|
json.dumps(weights), json.dumps(node_params), json.dumps(gate_params),
|
||||||
|
top_n, result.survivors_count),
|
||||||
|
)
|
||||||
|
run_id = cur.lastrowid
|
||||||
|
for row in result.rows:
|
||||||
|
conn.execute(
|
||||||
|
"""INSERT INTO screener_results (run_id,rank,ticker,name,total_score,
|
||||||
|
scores_json,close,market_cap,entry_price,stop_price,target_price,atr14)
|
||||||
|
VALUES (?,?,?,?,?,?,?,?,?,?,?,?)""",
|
||||||
|
(run_id, row["rank"], row["ticker"], row["name"], row["total_score"],
|
||||||
|
json.dumps(row["scores"]), row["close"], row["market_cap"],
|
||||||
|
row["entry_price"], row["stop_price"], row["target_price"], row["atr14"]),
|
||||||
|
)
|
||||||
|
conn.commit()
|
||||||
|
return run_id
|
||||||
|
|
||||||
|
|
||||||
|
@router.post("/run", response_model=schemas.RunResponse)
def post_run(body: schemas.RunRequest):
    """Run the screener once.

    Modes:
      preview     — compute only; nothing is persisted.
      manual_save — compute and persist the run + results.
      auto        — scheduled run: always uses stored settings, persists,
                    and returns status='skipped_holiday' on non-trading days.
    """
    # Imported locally — presumably to avoid a module-level import cycle
    # with the registry; confirm before hoisting.
    from .registry import NODE_REGISTRY as _NR, GATE_REGISTRY as _GR
    # NOTE(review): datetime.utcnow() is deprecated since Python 3.12 —
    # consider datetime.now(dt.timezone.utc) consistently across this module.
    started_at = dt.datetime.utcnow().isoformat()
    with _conn() as c:
        asof = _resolve_asof(body.asof, c)

        # Skipped holiday handling for mode='auto'
        if body.mode == "auto" and _is_holiday(asof):
            # Empty/zeroed payload — callers key off status, not the fields.
            return schemas.RunResponse(
                asof=asof.isoformat(), mode="auto", status="skipped_holiday",
                run_id=None, survivors_count=None,
                weights={}, top_n=0,
                results=[], telegram_payload=None,
                warnings=[f"{asof.isoformat()} is a holiday — skipped"],
            )

        defaults = _load_settings(c)

        # auto always runs with stored settings; manual modes may override per field.
        if body.mode == "auto":
            weights = defaults["weights"]
            node_params = defaults["node_params"]
            gate_params = defaults["gate_params"]
            top_n = defaults["top_n"]
        else:
            weights = body.weights if body.weights is not None else defaults["weights"]
            node_params = body.node_params if body.node_params is not None else defaults["node_params"]
            gate_params = body.gate_params if body.gate_params is not None else defaults["gate_params"]
            top_n = body.top_n if body.top_n is not None else defaults["top_n"]

        # Position-sizing knobs always come from stored settings (not overridable here).
        sizer_params = {
            "atr_window": defaults["atr_window"],
            "atr_stop_mult": defaults["atr_stop_mult"],
            "rr_ratio": defaults["rr_ratio"],
        }

        ctx = ScreenContext.load(c, asof)
        # Zero-weighted score nodes are not instantiated at all.
        score_nodes = [cls() for name, cls in _NR.items() if weights.get(name, 0) > 0]
        gate = _GR["hygiene"]()

        try:
            screener = Screener(
                gate=gate, score_nodes=score_nodes, weights=weights,
                node_params=node_params, gate_params=gate_params,
                top_n=top_n, sizer_params=sizer_params,
            )
            result = screener.run(ctx)
        except ValueError as e:
            # e.g. no active (positively weighted) nodes — a client error.
            raise HTTPException(422, str(e))

        finished_at = dt.datetime.utcnow().isoformat()
        run_id = None
        if body.mode in ("manual_save", "auto"):
            run_id = _persist_run(c, asof, body.mode, weights, node_params, gate_params,
                                  top_n, result, started_at, finished_at)

        # Payload is always built (even for preview) so the UI can show it;
        # actual delivery is the caller's responsibility.
        payload = _tg.build_telegram_payload(
            asof=asof, mode=body.mode, survivors_count=result.survivors_count,
            top_n=top_n, rows=result.rows, run_id=run_id,
        )

        return schemas.RunResponse(
            asof=asof.isoformat(), mode=body.mode, status="success",
            run_id=run_id, survivors_count=result.survivors_count,
            weights=weights, top_n=top_n,
            results=result.rows,
            telegram_payload=schemas.TelegramPayload(**payload),
            warnings=result.warnings,
        )
|
||||||
|
|
||||||
|
|
||||||
|
# ---------- /snapshot/refresh ----------
|
||||||
|
|
||||||
|
from . import snapshot as _snap
|
||||||
|
|
||||||
|
|
||||||
|
@router.post("/snapshot/refresh")
def post_snapshot_refresh(asof: Optional[str] = None):
    """Refresh the daily KRX snapshot for *asof* (default: today).

    Weekends are skipped outright — there is no trading session to fetch.
    """
    target = dt.date.fromisoformat(asof) if asof else dt.date.today()
    # weekday() >= 5 means Saturday (5) or Sunday (6).
    if target.weekday() >= 5:
        return {"asof": target.isoformat(), "status": "skipped_weekend"}
    with _conn() as c:
        return _snap.refresh_daily(c, target)
|
||||||
|
|
||||||
|
|
||||||
|
# ---------- /runs ----------
|
||||||
|
|
||||||
|
@router.get("/runs", response_model=list[schemas.RunSummary])
def list_runs(limit: int = 30):
    """List the most recent runs, newest first (by asof, then id)."""
    with _conn() as c:
        records = c.execute(
            "SELECT id,asof,mode,status,started_at,finished_at,top_n,"
            "survivors_count,telegram_sent FROM screener_runs "
            "ORDER BY asof DESC, id DESC LIMIT ?", (limit,),
        ).fetchall()
    summaries = []
    for rec in records:
        summaries.append(schemas.RunSummary(
            id=rec[0], asof=rec[1], mode=rec[2], status=rec[3],
            started_at=rec[4], finished_at=rec[5], top_n=rec[6],
            survivors_count=rec[7], telegram_sent=bool(rec[8]),
        ))
    return summaries
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/runs/{run_id}")
def get_run(run_id: int):
    """Full detail for one persisted run: metadata plus ranked result rows."""
    with _conn() as c:
        meta = c.execute(
            "SELECT id,asof,mode,status,started_at,finished_at,top_n,"
            "survivors_count,telegram_sent,weights_json,node_params_json,gate_params_json "
            "FROM screener_runs WHERE id=?",
            (run_id,),
        ).fetchone()
        if not meta:
            raise HTTPException(404, "run not found")
        records = c.execute(
            "SELECT rank,ticker,name,total_score,scores_json,close,market_cap,"
            "entry_price,stop_price,target_price,atr14 "
            "FROM screener_results WHERE run_id=? ORDER BY rank",
            (run_id,),
        ).fetchall()

    def _result_dict(rec):
        # scores_json (index 4) is stored as JSON text; decode for the client.
        return {
            "rank": rec[0], "ticker": rec[1], "name": rec[2],
            "total_score": rec[3], "scores": json.loads(rec[4]),
            "close": rec[5], "market_cap": rec[6],
            "entry_price": rec[7], "stop_price": rec[8], "target_price": rec[9],
            "atr14": rec[10],
        }

    return {
        "meta": {
            "id": meta[0], "asof": meta[1], "mode": meta[2], "status": meta[3],
            "started_at": meta[4], "finished_at": meta[5], "top_n": meta[6],
            "survivors_count": meta[7], "telegram_sent": bool(meta[8]),
            "weights": json.loads(meta[9]),
            "node_params": json.loads(meta[10]),
            "gate_params": json.loads(meta[11]),
        },
        "results": [_result_dict(rec) for rec in records],
    }
|
||||||
85
stock-lab/app/screener/schemas.py
Normal file
85
stock-lab/app/screener/schemas.py
Normal file
@@ -0,0 +1,85 @@
|
|||||||
|
from __future__ import annotations
|
||||||
|
from typing import Literal, Optional
|
||||||
|
from pydantic import BaseModel, Field
|
||||||
|
|
||||||
|
|
||||||
|
class NodeMeta(BaseModel):
    """UI-facing metadata describing one registered score/gate node."""
    name: str              # registry key, e.g. "foreign_buy"
    label: str             # human-readable display label
    default_params: dict   # node's built-in default parameter values
    param_schema: dict     # parameter description consumed by the UI
|
||||||
|
|
||||||
|
|
||||||
|
class NodesResponse(BaseModel):
    """Response for GET /nodes: all registered score and gate nodes."""
    score_nodes: list[NodeMeta]
    gate_nodes: list[NodeMeta]
|
||||||
|
|
||||||
|
|
||||||
|
class SettingsBody(BaseModel):
    """Persisted screener configuration (PUT /settings request payload)."""
    weights: dict[str, float]                                   # per-score-node weight
    node_params: dict[str, dict] = Field(default_factory=dict)  # per-node overrides
    gate_params: dict
    top_n: int = 20             # number of ranked rows to keep
    rr_ratio: float = 2.0       # reward:risk ratio used when sizing targets
    atr_window: int = 14        # ATR lookback window (days)
    atr_stop_mult: float = 2.0  # stop distance in ATR multiples
|
||||||
|
|
||||||
|
|
||||||
|
class SettingsResponse(SettingsBody):
    """Settings plus the server-side last-modified timestamp."""
    updated_at: str  # ISO-8601 timestamp (written with utcnow in the router)
|
||||||
|
|
||||||
|
|
||||||
|
class RunRequest(BaseModel):
    """POST /run payload. Omitted overrides fall back to stored settings."""
    mode: Literal["preview", "manual_save", "auto"] = "preview"
    asof: Optional[str] = None  # ISO date; default: latest snapshot date
    weights: Optional[dict[str, float]] = None
    node_params: Optional[dict[str, dict]] = None
    gate_params: Optional[dict] = None
    top_n: Optional[int] = None
|
||||||
|
|
||||||
|
|
||||||
|
class ResultRow(BaseModel):
    """One ranked stock in a screener run."""
    rank: int
    ticker: str
    name: str
    total_score: float
    scores: dict[str, float]  # per-node score breakdown
    close: int
    market_cap: int
    # Position-sizing fields; None when sizing was unavailable for the ticker.
    entry_price: Optional[int] = None
    stop_price: Optional[int] = None
    target_price: Optional[int] = None
    atr14: Optional[float] = None
    r_pct: Optional[float] = None  # presumably risk (entry→stop) as % of entry — confirm in position_sizer
|
||||||
|
|
||||||
|
|
||||||
|
class TelegramPayload(BaseModel):
    """Ready-to-send Telegram message; delivery is done by the caller."""
    chat_target: str  # logical chat name, e.g. "default"
    parse_mode: str   # e.g. "MarkdownV2"
    text: str
|
||||||
|
|
||||||
|
|
||||||
|
class RunResponse(BaseModel):
    """POST /run result envelope."""
    asof: str
    mode: str
    status: Literal["success", "failed", "skipped_holiday"]
    run_id: Optional[int] = None           # set only when the run was persisted
    survivors_count: Optional[int] = None  # tickers that passed the gate
    weights: dict[str, float]
    top_n: int
    results: list[ResultRow] = Field(default_factory=list)
    telegram_payload: Optional[TelegramPayload] = None
    warnings: list[str] = Field(default_factory=list)
    error: Optional[str] = None  # presumably populated when status == "failed" — confirm
|
||||||
|
|
||||||
|
|
||||||
|
class RunSummary(BaseModel):
    """Row of GET /runs: compact metadata for one persisted run."""
    id: int
    asof: str
    mode: str
    status: str
    started_at: str
    finished_at: Optional[str] = None
    top_n: int
    survivors_count: Optional[int] = None
    telegram_sent: bool
|
||||||
72
stock-lab/app/screener/telegram.py
Normal file
72
stock-lab/app/screener/telegram.py
Normal file
@@ -0,0 +1,72 @@
|
|||||||
|
"""Telegram payload builder. Caller (agent-office) handles actual delivery."""
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import datetime as dt
|
||||||
|
|
||||||
|
# Badge shown next to a stock for every node whose score clears the >=70
# threshold in build_telegram_payload. Keys must match the node registry names.
NODE_ICONS = {
    "foreign_buy": "👤외",
    "volume_surge": "⚡거",
    "momentum": "🚀모",
    "high52w": "🆙고",
    "rs_rating": "💪RS",
    "ma_alignment": "📈MA",
    "vcp_lite": "🌀VCP",
}

# Base URL of the screener web page; the full-results deep link appends ?run_id=<id>.
PAGE_BASE = "https://gahusb.synology.me/stock/screener"
|
||||||
|
|
||||||
|
|
||||||
|
def _escape_md(s: str) -> str:
|
||||||
|
"""Minimal MarkdownV2 escape — extend if formatting breaks."""
|
||||||
|
for ch in r"\_*[]()~`>#+-=|{}.!":
|
||||||
|
s = s.replace(ch, "\\" + ch)
|
||||||
|
return s
|
||||||
|
|
||||||
|
|
||||||
|
def _format_won(n) -> str:
|
||||||
|
if n is None:
|
||||||
|
return "-"
|
||||||
|
return f"{int(n):,}"
|
||||||
|
|
||||||
|
|
||||||
|
def build_telegram_payload(asof: dt.date, mode: str, survivors_count: int,
                           top_n: int, rows: list, run_id) -> dict:
    """Build the MarkdownV2 Telegram message dict for one screener run.

    Only the first 10 rows appear in the body; when run_id is set, a deep
    link points at the web page for the full list. The caller delivers the
    message (this module only formats it).
    """
    title = "*KRX 강세주 스크리너*"
    header = (
        f"🎯 {title} — {_escape_md(asof.isoformat())} \\({_escape_md(mode)}\\)\n"
        f"통과 {survivors_count}종 / Top {top_n} / 본문 1\\-10"
    )

    lines = []
    for r in rows[:10]:
        # One badge per node whose score clears 70 — a quick visual of why
        # the stock ranked.
        icons = " ".join(
            NODE_ICONS[name] for name, sc in r["scores"].items()
            if sc >= 70 and name in NODE_ICONS
        )
        score_str = f"{r['total_score']:.1f}"
        r_pct = r.get("r_pct")
        r_pct_str = f"{r_pct:.1f}" if r_pct is not None else "-"
        lines.append(
            f"{r['rank']}\\. *{_escape_md(r['name'])}* `{r['ticker']}` "
            f"⭐ {_escape_md(score_str)}\n"
            f" {icons}\n"
            f" 진입 {_format_won(r.get('entry_price'))} "
            f"손절 {_format_won(r.get('stop_price'))} "
            f"익절 {_format_won(r.get('target_price'))} "
            f"\\(R {_escape_md(r_pct_str)}%\\)"
        )

    # Wrap the URL in an inline link so the . - ? = inside it need no escaping.
    link = (
        f"🔗 [전체 결과·11\\~20위]({PAGE_BASE}?run_id={run_id})"
        if run_id else ""
    )

    text = header + "\n\n" + "\n\n".join(lines) + ("\n\n" + link if link else "")

    return {
        "chat_target": "default",  # resolved to a concrete chat id by the caller
        "parse_mode": "MarkdownV2",
        "text": text,
    }
|
||||||
55
stock-lab/app/test_screener_engine.py
Normal file
55
stock-lab/app/test_screener_engine.py
Normal file
@@ -0,0 +1,55 @@
|
|||||||
|
import datetime as dt
|
||||||
|
import pandas as pd
|
||||||
|
import pytest
|
||||||
|
|
||||||
|
from app.screener.engine import ScreenContext, Screener, combine
|
||||||
|
from app.screener.nodes.hygiene import HygieneGate
|
||||||
|
from app.screener.nodes.foreign_buy import ForeignBuy
|
||||||
|
from app.screener.nodes.momentum import Momentum20
|
||||||
|
from app.screener._test_fixtures import make_master, make_prices, make_flow, make_kospi
|
||||||
|
|
||||||
|
|
||||||
|
def _ctx(master, prices, flow):
    # Shared fixture wrapper: fixed as-of date with a 260-day KOSPI series.
    return ScreenContext(master=master, prices=prices, flow=flow,
                         kospi=make_kospi(days=260),
                         asof=dt.date(2026, 5, 12))


def test_combine_weighted_average():
    # combine() must produce the weighted average of per-node scores.
    scores = {
        "foreign_buy": pd.Series({"A": 80, "B": 20}),
        "momentum": pd.Series({"A": 60, "B": 40}),
    }
    weights = {"foreign_buy": 2.0, "momentum": 1.0}
    out = combine(scores, weights)
    # A: (80*2 + 60*1)/3 = 73.33
    assert abs(out["A"] - 73.333) < 0.1
    assert abs(out["B"] - 26.666) < 0.1


def test_combine_all_zero_weight_raises():
    # All-zero weights leave no active node — combine() must refuse to run.
    scores = {"foreign_buy": pd.Series({"A": 80})}
    with pytest.raises(ValueError, match="no active"):
        combine(scores, {"foreign_buy": 0})


def test_screener_run_end_to_end():
    asof = dt.date(2026, 5, 12)
    # GOOD: large cap with foreign inflow; SMALL: micro cap that the gate drops.
    master = make_master(["GOOD", "SMALL"],
                         market_caps={"GOOD": 200_000_000_000, "SMALL": 1_000_000_000})
    prices = make_prices(["GOOD", "SMALL"], days=260, asof=asof, trend_pct=0.1)
    flow = make_flow(["GOOD", "SMALL"], days=260, asof=asof,
                     foreign_per_day={"GOOD": 100_000_000, "SMALL": 0})
    ctx = _ctx(master, prices, flow)

    # min_listed_days=0 so the synthetic 260-day history passes the gate.
    screener = Screener(
        gate=HygieneGate(),
        score_nodes=[ForeignBuy(), Momentum20()],
        weights={"foreign_buy": 1.0, "momentum": 1.0},
        node_params={"foreign_buy": {"window_days": 5}, "momentum": {"window_days": 20}},
        gate_params={**HygieneGate.default_params, "min_listed_days": 0},
        top_n=10,
    )
    result = screener.run(ctx)
    assert result.survivors_count == 1  # SMALL is eliminated by the gate
    assert result.ranked.index[0] == "GOOD"
|
||||||
36
stock-lab/app/test_screener_nodes_vcp_lite.py
Normal file
36
stock-lab/app/test_screener_nodes_vcp_lite.py
Normal file
@@ -0,0 +1,36 @@
|
|||||||
|
import datetime as dt
|
||||||
|
import pandas as pd
|
||||||
|
|
||||||
|
from app.screener.engine import ScreenContext
|
||||||
|
from app.screener.nodes.vcp_lite import VcpLite
|
||||||
|
from app.screener._test_fixtures import make_master, make_prices, make_flow
|
||||||
|
|
||||||
|
|
||||||
|
def _ctx(master, prices, flow):
    # VcpLite does not read the KOSPI series, so an empty Series suffices.
    return ScreenContext(master=master, prices=prices, flow=flow,
                         kospi=pd.Series(dtype=float, name="kospi"),
                         asof=dt.date(2026, 5, 12))


def test_contracting_stock_scores_higher_than_expanding():
    asof = dt.date(2026, 5, 12)
    master = make_master(["CON", "EXP"])
    prices = make_prices(["CON", "EXP"], days=260, asof=asof)

    # CON: volatility contracting over the last 40 days (narrow high/low band)
    mask_recent_con = (prices["ticker"] == "CON") & (
        prices["date"] >= (asof - dt.timedelta(days=40)).isoformat()
    )
    prices.loc[mask_recent_con, "high"] = (prices.loc[mask_recent_con, "close"] * 1.003).astype(int)
    prices.loc[mask_recent_con, "low"] = (prices.loc[mask_recent_con, "close"] * 0.997).astype(int)

    # EXP: volatility expanding over the last 40 days (wide high/low band)
    mask_recent_exp = (prices["ticker"] == "EXP") & (
        prices["date"] >= (asof - dt.timedelta(days=40)).isoformat()
    )
    prices.loc[mask_recent_exp, "high"] = (prices.loc[mask_recent_exp, "close"] * 1.05).astype(int)
    prices.loc[mask_recent_exp, "low"] = (prices.loc[mask_recent_exp, "close"] * 0.95).astype(int)

    flow = make_flow(["CON", "EXP"], days=260, asof=asof)
    out = VcpLite().compute(_ctx(master, prices, flow), VcpLite.default_params)
    # The contracting-volatility ticker must outscore the expanding one.
    assert out["CON"] > out["EXP"]
|
||||||
33
stock-lab/app/test_screener_position_sizer.py
Normal file
33
stock-lab/app/test_screener_position_sizer.py
Normal file
@@ -0,0 +1,33 @@
|
|||||||
|
import datetime as dt
|
||||||
|
import pandas as pd
|
||||||
|
|
||||||
|
from app.screener.engine import ScreenContext
|
||||||
|
from app.screener.position_sizer import compute_atr_wilder, plan_positions
|
||||||
|
from app.screener._test_fixtures import make_master, make_prices, make_flow
|
||||||
|
|
||||||
|
|
||||||
|
def _ctx(master, prices, flow):
    # Position sizing does not read the KOSPI series; pass an empty one.
    return ScreenContext(master=master, prices=prices, flow=flow,
                         kospi=pd.Series(dtype=float, name="kospi"),
                         asof=dt.date(2026, 5, 12))


def test_atr_wilder_positive_and_smooth():
    # Any well-formed price history must yield a strictly positive ATR.
    df = make_prices(["A"], days=30)
    atr = compute_atr_wilder(df[df["ticker"] == "A"], window=14)
    assert atr > 0


def test_plan_positions_returns_entry_stop_target():
    asof = dt.date(2026, 5, 12)
    master = make_master(["A"])
    prices = make_prices(["A"], days=30, asof=asof, start_close=50000)
    flow = make_flow(["A"], days=30, asof=asof)
    ctx = _ctx(master, prices, flow)
    sizing = plan_positions(ctx, ["A"], {"atr_window": 14, "atr_stop_mult": 2.0, "rr_ratio": 2.0})

    # Long-setup invariants: stop below entry, target above, ATR positive.
    row = sizing["A"]
    assert row["entry_price"] > 0
    assert row["stop_price"] < row["entry_price"]
    assert row["target_price"] > row["entry_price"]
    assert row["atr14"] > 0
|
||||||
154
stock-lab/app/test_screener_router.py
Normal file
154
stock-lab/app/test_screener_router.py
Normal file
@@ -0,0 +1,154 @@
|
|||||||
|
import os
|
||||||
|
import sqlite3
|
||||||
|
import pytest
|
||||||
|
from fastapi.testclient import TestClient
|
||||||
|
|
||||||
|
from app.screener.schema import ensure_screener_schema
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.fixture(autouse=True)
def isolated_db(tmp_path, monkeypatch):
    # Fresh SQLite file per test; the app reads its path from STOCK_DB_PATH.
    db_path = tmp_path / "screener_router.db"
    c = sqlite3.connect(db_path)
    ensure_screener_schema(c)
    c.close()
    monkeypatch.setenv("STOCK_DB_PATH", str(db_path))


@pytest.fixture
def client():
    # Imported lazily so the app picks up the patched STOCK_DB_PATH.
    from app.main import app
    return TestClient(app)


def test_get_nodes_lists_7_score_and_1_gate(client):
    r = client.get("/api/stock/screener/nodes")
    assert r.status_code == 200
    body = r.json()
    assert len(body["score_nodes"]) == 7
    assert len(body["gate_nodes"]) == 1
    assert {n["name"] for n in body["score_nodes"]} == {
        "foreign_buy", "volume_surge", "momentum",
        "high52w", "rs_rating", "ma_alignment", "vcp_lite",
    }


def test_settings_get_returns_defaults(client):
    # Schema bootstrap must seed usable defaults before any PUT.
    r = client.get("/api/stock/screener/settings")
    assert r.status_code == 200
    body = r.json()
    assert body["weights"]["foreign_buy"] == 1.0
    assert body["top_n"] == 20


def test_settings_put_then_get_round_trip(client):
    new_settings = {
        "weights": {"foreign_buy": 2.5, "momentum": 1.0, "volume_surge": 1.0,
                    "high52w": 1.2, "rs_rating": 1.2, "ma_alignment": 1.0, "vcp_lite": 0.8},
        "node_params": {"foreign_buy": {"window_days": 7}},
        "gate_params": {"min_market_cap_won": 100_000_000_000,
                        "min_avg_value_won": 500_000_000,
                        "min_listed_days": 60,
                        "skip_managed": True, "skip_preferred": True, "skip_spac": True,
                        "skip_halted_days": 3},
        "top_n": 30,
        "rr_ratio": 2.5,
        "atr_window": 14,
        "atr_stop_mult": 2.0,
    }
    r = client.put("/api/stock/screener/settings", json=new_settings)
    assert r.status_code == 200
    r2 = client.get("/api/stock/screener/settings")
    body = r2.json()
    assert body["weights"]["foreign_buy"] == 2.5
    assert body["top_n"] == 30


# ---- /run tests ----

from app.screener._test_fixtures import make_master, make_prices, make_flow


def _seed_min(conn, asof_iso="2026-05-12"):
    """Seed three tickers (two large caps, one micro cap) with 260 days of data."""
    # NOTE(review): the asof_iso parameter is unused — the date below is hard-coded.
    import datetime as dt
    now = dt.datetime.utcnow().isoformat()
    rows = [
        ("BIG1", "큰주식1", "KOSPI", 200_000_000_000, 0, 0, 0, None, now),
        ("BIG2", "큰주식2", "KOSPI", 100_000_000_000, 0, 0, 0, None, now),
        ("SMALL", "작은주식", "KOSPI", 1_000_000_000, 0, 0, 0, None, now),
    ]
    for r in rows:
        conn.execute("""INSERT INTO krx_master (ticker,name,market,market_cap,
            is_managed,is_preferred,is_spac,listed_date,updated_at)
            VALUES (?,?,?,?,?,?,?,?,?)""", r)
    asof = dt.date(2026, 5, 12)
    p = make_prices(["BIG1", "BIG2", "SMALL"], days=260, asof=asof)
    f = make_flow(["BIG1", "BIG2", "SMALL"], days=260, asof=asof,
                  foreign_per_day={"BIG1": 100_000_000, "BIG2": 50_000_000, "SMALL": 0})
    p.to_sql("krx_daily_prices", conn, if_exists="append", index=False)
    f.to_sql("krx_flow", conn, if_exists="append", index=False)
    conn.commit()


def test_run_preview_no_save(client):
    db_path = os.environ["STOCK_DB_PATH"]
    c = sqlite3.connect(db_path)
    _seed_min(c)
    c.close()

    r = client.post("/api/stock/screener/run", json={"mode": "preview", "asof": "2026-05-12"})
    assert r.status_code == 200
    body = r.json()
    assert body["status"] == "success"
    assert body["run_id"] is None
    assert body["telegram_payload"] is not None

    # preview mode must not persist anything
    c = sqlite3.connect(db_path)
    cnt = c.execute("SELECT count(*) FROM screener_runs").fetchone()[0]
    assert cnt == 0


def test_run_manual_save_writes_row(client):
    db_path = os.environ["STOCK_DB_PATH"]
    c = sqlite3.connect(db_path)
    _seed_min(c)
    c.close()

    r = client.post("/api/stock/screener/run",
                    json={"mode": "manual_save", "asof": "2026-05-12"})
    assert r.status_code == 200
    assert r.json()["run_id"] is not None

    c = sqlite3.connect(db_path)
    cnt = c.execute("SELECT count(*) FROM screener_runs").fetchone()[0]
    assert cnt == 1


def test_runs_list_and_detail(client):
    db_path = os.environ["STOCK_DB_PATH"]
    c = sqlite3.connect(db_path)
    _seed_min(c)
    c.close()

    saved = client.post(
        "/api/stock/screener/run",
        json={"mode": "manual_save", "asof": "2026-05-12"},
    ).json()
    run_id = saved["run_id"]

    list_r = client.get("/api/stock/screener/runs?limit=5")
    assert list_r.status_code == 200
    assert any(r["id"] == run_id for r in list_r.json())

    detail = client.get(f"/api/stock/screener/runs/{run_id}")
    assert detail.status_code == 200
    assert detail.json()["meta"]["id"] == run_id
    assert isinstance(detail.json()["results"], list)


def test_run_holiday_returns_skipped(client):
    # 2026-05-09 is a Saturday (weekend); _is_holiday catches weekday >= 5.
    r = client.post("/api/stock/screener/run",
                    json={"mode": "auto", "asof": "2026-05-09"})
    assert r.status_code == 200
    assert r.json()["status"] == "skipped_holiday"
|
||||||
51
stock-lab/app/test_screener_telegram.py
Normal file
51
stock-lab/app/test_screener_telegram.py
Normal file
@@ -0,0 +1,51 @@
|
|||||||
|
import datetime as dt
|
||||||
|
from app.screener.telegram import build_telegram_payload
|
||||||
|
|
||||||
|
|
||||||
|
def test_build_payload_includes_top10_and_link():
    # Twenty ranked rows in; only the first ten may appear in the body.
    rows = [
        {
            "rank": i, "ticker": f"00{i:04}", "name": f"종목{i}",
            "total_score": 90 - i,
            "scores": {"foreign_buy": 80 + i, "volume_surge": 60, "momentum": 70,
                       "high52w": 75, "rs_rating": 85, "ma_alignment": 80, "vcp_lite": 30},
            "close": 50000, "entry_price": 50250, "stop_price": 48500,
            "target_price": 53750, "r_pct": 3.5,
        }
        for i in range(1, 21)
    ]
    p = build_telegram_payload(
        asof=dt.date(2026, 5, 12),
        mode="auto",
        survivors_count=612,
        top_n=20,
        rows=rows,
        run_id=42,
    )
    assert p["parse_mode"] == "MarkdownV2"
    text = p["text"]
    assert "2026" in text and "05" in text and "12" in text
    assert "종목1" in text
    assert "종목10" in text
    assert "종목11" not in text  # only ranks 1-10 in the message body
    assert "42" in text  # run_id appears in the deep link


def test_score_threshold_filters_icons():
    rows = [{
        "rank": 1, "ticker": "A", "name": "A주",
        "total_score": 80,
        "scores": {"foreign_buy": 90, "volume_surge": 50, "momentum": 70,
                   "high52w": 30, "rs_rating": 80, "ma_alignment": 80, "vcp_lite": 60},
        "close": 50000, "entry_price": 50250, "stop_price": 48500,
        "target_price": 53750, "r_pct": 3.5,
    }]
    p = build_telegram_payload(dt.date(2026, 5, 12), "auto", 100, 1, rows, run_id=1)
    # Only foreign_buy(90), momentum(70), rs_rating(80), ma_alignment(80)
    # clear the >=70 badge threshold.
    assert "👤외" in p["text"]
    assert "🚀모" in p["text"]
    assert "💪RS" in p["text"]
    assert "📈MA" in p["text"]
    assert "⚡거" not in p["text"]
    assert "🆙고" not in p["text"]
    assert "🌀VCP" not in p["text"]
|
||||||
Reference in New Issue
Block a user