diff --git a/stock-lab/app/scraper.py b/stock-lab/app/scraper.py
index b933052..f2aa605 100644
--- a/stock-lab/app/scraper.py
+++ b/stock-lab/app/scraper.py
@@ -186,9 +186,9 @@ def fetch_major_indices() -> Dict[str, Any]:
         soup_world = BeautifulSoup(resp_world.content, "html.parser", from_encoding="cp949")
 
         world_targets = [
-            {"key": "DJI", "sym": "DJI@DJI"},
-            {"key": "NAS", "sym": "NAS@IXIC"},
-            {"key": "SPI", "sym": "SPI@SPX"},
+            {"key": "DJI", "name": "다우산업", "sym": "DJI@DJI"},
+            {"key": "NAS", "name": "나스닥", "sym": "NAS@IXIC"},
+            {"key": "SPI", "name": "S&P500", "sym": "SPI@SPX"},
         ]
 
         for wt in world_targets:
@@ -228,7 +228,7 @@ def fetch_major_indices() -> Dict[str, Any]:
                 direction = "blue"
 
             indices.append({
-                "name": wt["key"],
+                "name": wt["name"],  # use the Korean display name
                 "value": value,
                 "change_value": change_val,
                 "change_percent": change_pct,
@@ -238,4 +238,38 @@ def fetch_major_indices() -> Dict[str, Any]:
     except Exception as e:
         print(f"[StockLab] World indices failed: {e}")
+
+    # --- Exchange rate (USD/KRW) ---
+    try:
+        resp_ex = requests.get("https://finance.naver.com/marketindex/", headers=headers, timeout=5)
+        soup_ex = BeautifulSoup(resp_ex.content, "html.parser", from_encoding="cp949")
+
+        usd_item = soup_ex.select_one("#exchangeList li.on > a.head.usd")
+        if usd_item:
+            value = usd_item.select_one(".value").get_text(strip=True)
+            change_val = usd_item.select_one(".change").get_text(strip=True)
+
+            # Direction comes from the screen-reader ("blind") text: "상승" = up, "하락" = down.
+            direction = ""
+            blind_txt = usd_item.select_one(".blind").get_text(strip=True)
+            if "상승" in blind_txt:
+                direction = "red"
+            elif "하락" in blind_txt:
+                direction = "blue"
+
+            # The change percentage is not shown in the main list, only on the
+            # detail page (it may exist in a title attribute). It could be
+            # computed instead, but for now handle only the value and leave the
+            # percentage empty, keeping the entry shape consistent with the UI.
+
+            indices.append({
+                "name": "원달러 환율",
+                "value": value,
+                "change_value": change_val,
+                "change_percent": "",  # not directly visible in the main list
+                "direction": direction,
+                "type": "exchange"
+            })
+    except Exception as e:
+        print(f"[StockLab] Exchange rate failed: {e}")
 
     return {"indices": indices, "crawled_at": time.strftime("%Y-%m-%d %H:%M:%S")}
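
Note (outside the diff): the comment block in the last hunk mentions that the
change percentage "could be computed" rather than left blank. A minimal sketch
of that derivation, assuming value and change_val arrive as comma-formatted
strings such as "1,385.50" / "4.50" and reusing the red/blue direction flag
already scraped above; the helper name and signature are hypothetical, not
part of this patch:

    def compute_change_percent(value: str, change_val: str, direction: str) -> str:
        """Derive the percentage Naver's main list omits (hypothetical helper)."""
        try:
            cur = float(value.replace(",", ""))
            chg = float(change_val.replace(",", ""))
        except ValueError:
            return ""  # fall back to the empty string the UI already tolerates
        # The main list shows only the absolute change, so reconstruct the
        # previous close from the scraped direction flag.
        prev = cur + chg if direction == "blue" else cur - chg
        if prev == 0:
            return ""
        sign = "-" if direction == "blue" else "+"
        return f"{sign}{chg / prev * 100:.2f}%"

For example, compute_change_percent("1,385.50", "4.50", "red") returns
"+0.33%", which could be assigned to change_percent instead of "".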