From 94db1da0450228e29d55a43f61d0c32bfc057423 Mon Sep 17 00:00:00 2001
From: gahusb
Date: Mon, 26 Jan 2026 03:14:46 +0900
Subject: [PATCH] feat: add stock indices scraping and update healthcheck

---
 scripts/healthcheck.sh   |  8 ++++-
 stock-lab/app/main.py    |  7 ++++-
 stock-lab/app/scraper.py | 66 ++++++++++++++++++++++++++++++++++++++--
 3 files changed, 76 insertions(+), 5 deletions(-)

diff --git a/scripts/healthcheck.sh b/scripts/healthcheck.sh
index 8a189f3..0e1f2c0 100644
--- a/scripts/healthcheck.sh
+++ b/scripts/healthcheck.sh
@@ -41,7 +41,13 @@
 echo "--- 2. Travel Proxy Service ---"
 check_url "Travel Regions" "http://localhost:19000/api/travel/regions"
 echo ""
-echo "--- 3. Frontend (Nginx) ---"
+echo "--- 3. Stock Lab Service ---"
+check_url "Stock Health" "http://localhost:18500/health"
+check_url "Stock News" "http://localhost:18500/api/stock/news"
+check_url "Stock Indices" "http://localhost:18500/api/stock/indices"
+
+echo ""
+echo "--- 4. Frontend (Nginx) ---"
 # Test access via the external port 8080
 check_url "Frontend Home" "http://localhost:8080"
 # Check that Nginx proxies to the backend correctly (calls an API that actually exists)
diff --git a/stock-lab/app/main.py b/stock-lab/app/main.py
index 5756537..4c8ef16 100644
--- a/stock-lab/app/main.py
+++ b/stock-lab/app/main.py
@@ -3,7 +3,7 @@
 from fastapi import FastAPI
 from apscheduler.schedulers.background import BackgroundScheduler
 from .db import init_db, save_articles, get_latest_articles
-from .scraper import fetch_market_news
+from .scraper import fetch_market_news, fetch_major_indices
 
 app = FastAPI()
 scheduler = BackgroundScheduler(timezone=os.getenv("TZ", "Asia/Seoul"))
@@ -36,6 +36,11 @@ def get_news(limit: int = 20):
     """Fetch the latest stock news articles."""
     return get_latest_articles(limit)
 
+@app.get("/api/stock/indices")
+def get_indices():
+    """Scrape and return major market indices (KOSPI, etc.) in real time."""
+    return fetch_major_indices()
+
 @app.post("/api/admin/stock/scrap")
 def trigger_scrap():
     """Manually trigger a news scrape."""
diff --git a/stock-lab/app/scraper.py b/stock-lab/app/scraper.py
index 9d1ec62..726db43 100644
--- a/stock-lab/app/scraper.py
+++ b/stock-lab/app/scraper.py
@@ -72,7 +72,67 @@ def fetch_market_news() -> List[Dict[str, str]]:
 
 def fetch_major_indices() -> Dict[str, Any]:
     """
-    Major indices such as KOSPI, KOSDAQ, and USD/KRW (Naver Finance home page)
+    Major indices such as KOSPI, KOSDAQ, and KOSPI200 (Naver Finance home page)
     """
-    # ... (to be implemented later; returns an empty dict for now)
-    return {}
+    url = "https://finance.naver.com/"
+    try:
+        headers = {
+            "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/90.0.4430.93 Safari/537.36"
+        }
+        resp = requests.get(url, headers=headers, timeout=5)
+        resp.raise_for_status()
+
+        soup = BeautifulSoup(resp.content, "html.parser", from_encoding="cp949")
+
+        indices = []
+        # Index boxes inside the 'section_stock_market' block at the top of the Naver Finance home page
+        # (the top_kospi, top_kosdaq, and top_kospi200 areas)
+
+        targets = [
+            {"key": "KOSPI", "selector": ".kospi_area"},
+            {"key": "KOSDAQ", "selector": ".kosdaq_area"},
+            {"key": "KOSPI200", "selector": ".kospi200_area"},
+        ]
+
+        for t in targets:
+            area = soup.select_one(t["selector"])
+            if not area:
+                continue
+
+            # Current value
+            num_tag = area.select_one(".num")
+            value = num_tag.get_text(strip=True) if num_tag else ""
+
+            # Change figures -> the arrow/sign still needs to be checked separately.
+            # The .num2 / .num3 classes can shift between up and down states;
+            # here .num2 is read as the point change and .num3 as the percent change,
+            # though the page often splits them into .nk (value) and .per (percent) instead.
+
+            change_val_tag = area.select_one(".num2")
+            change_pct_tag = area.select_one(".num3")
+
+            change_val = change_val_tag.get_text(strip=True) if change_val_tag else ""
+            change_pct = change_pct_tag.get_text(strip=True) if change_pct_tag else ""
+
+            # Up/down sign handling: the sign must be judged from the arrow text or CSS class,
+            # since the plain text may contain something like '상승 10.5' ("up 10.5").
+            # Here the raw text values are returned as-is.
+
+            # Check the direction (up/down) class
+            direction = ""
+            if area.select_one(".bu_p"): direction = "red"  # up
+            elif area.select_one(".bu_m"): direction = "blue"  # down
+
+            indices.append({
+                "name": t["key"],
+                "value": value,
+                "change_value": change_val,
+                "change_percent": change_pct,
+                "direction": direction
+            })
+
+        return {"indices": indices, "crawled_at": time.strftime("%Y-%m-%d %H:%M:%S")}
+
+    except Exception as e:
+        print(f"[StockLab] Indices scraping failed: {e}")
+        return {"indices": [], "error": str(e)}
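Below is a minimal smoke-test sketch for the new /api/stock/indices route. It is not part of the patch above: it assumes the stock-lab service is reachable at http://localhost:18500 (the same address used by scripts/healthcheck.sh), and the helper name check_indices is illustrative. It relies only on the response shape produced by fetch_major_indices().

# Hypothetical smoke test for GET /api/stock/indices (not part of the patch).
# Assumes the stock-lab service is running locally on port 18500, as in scripts/healthcheck.sh.
import requests

def check_indices(base_url: str = "http://localhost:18500") -> None:
    resp = requests.get(f"{base_url}/api/stock/indices", timeout=5)
    resp.raise_for_status()
    data = resp.json()

    # fetch_major_indices() returns {"indices": [...], "crawled_at": ...} on success
    # and {"indices": [], "error": "..."} when scraping fails.
    assert "indices" in data, "response is missing the 'indices' key"
    if data.get("error"):
        print(f"scrape failed upstream: {data['error']}")
        return

    for idx in data["indices"]:
        # Each entry carries name, value, change_value, change_percent, and direction.
        print(f"{idx['name']}: {idx['value']} ({idx['change_value']}, {idx['change_percent']}, direction={idx['direction']})")

if __name__ == "__main__":
    check_indices()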