P2: print→logging 전환, 포트폴리오 중복 제거, Docker healthcheck 추가

- backend/main.py: logging 모듈 도입, print() 제거
- stock-lab/main.py: print() → logger 전환, _calc_portfolio_totals 공용 함수 추출
- stock-lab/scraper.py: logging 모듈 도입, print() 제거
- docker-compose.yml: 전 서비스 healthcheck 블록 추가 (30s 간격, 3회 재시도)

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
2026-04-03 01:45:39 +09:00
parent 6a1a2c4552
commit 819c35adfc
4 changed files with 58 additions and 17 deletions

View File

@@ -1,10 +1,14 @@
import os
import time
import logging
from typing import Optional, List, Dict, Any, Tuple
from fastapi import FastAPI, HTTPException
from pydantic import BaseModel
from apscheduler.schedulers.background import BackgroundScheduler
logging.basicConfig(level=logging.INFO, format="%(asctime)s [%(name)s] %(levelname)s %(message)s")
logger = logging.getLogger("lotto-backend")
from .db import (
init_db, get_draw, get_latest_draw, get_all_draw_numbers,
save_recommendation_dedup, list_recommendations_ex, delete_recommendation,
@@ -52,7 +56,7 @@ _PERF_CACHE_TTL = 3600 # 1시간 (스케줄러 미실행 상황 대비 폴백)
def _refresh_perf_cache() -> None:
_PERF_CACHE["data"] = get_recommendation_performance()
_PERF_CACHE["at"] = time.time()
print("[PerfCache] 성과 통계 캐시 갱신")
logger.info("성과 통계 캐시 갱신")
@app.on_event("startup")
@@ -86,7 +90,7 @@ def on_startup():
target = latest["drw_no"] + 1
report = generate_weekly_report(draws, target)
save_weekly_report(target, _json.dumps(report, ensure_ascii=False))
print(f"[WeeklyReport] {target}회차 리포트 저장 완료")
logger.info(f"{target}회차 리포트 저장 완료")
scheduler.add_job(_save_weekly_report_job, "cron", day_of_week="sat", hour=9, minute=0)

View File

@@ -16,6 +16,11 @@ services:
- LOTTO_LATEST_URL=${LOTTO_LATEST_URL:-https://smok95.github.io/lotto/results/latest.json}
volumes:
- ${RUNTIME_PATH}/data:/app/data
healthcheck:
test: ["CMD", "python", "-c", "import urllib.request; urllib.request.urlopen('http://localhost:8000/health')"]
interval: 30s
timeout: 5s
retries: 3
stock-lab:
build:
@@ -36,6 +41,11 @@ services:
- CORS_ALLOW_ORIGINS=${CORS_ALLOW_ORIGINS:-http://localhost:3007,http://localhost:8080}
volumes:
- ${STOCK_DATA_PATH:-./data/stock}:/app/data
healthcheck:
test: ["CMD", "python", "-c", "import urllib.request; urllib.request.urlopen('http://localhost:8000/health')"]
interval: 30s
timeout: 5s
retries: 3
music-lab:
build:
@@ -51,6 +61,11 @@ services:
- CORS_ALLOW_ORIGINS=${CORS_ALLOW_ORIGINS:-http://localhost:3007,http://localhost:8080}
volumes:
- ${MUSIC_DATA_PATH:-./data/music}:/app/data
healthcheck:
test: ["CMD", "python", "-c", "import urllib.request; urllib.request.urlopen('http://localhost:8000/health')"]
interval: 30s
timeout: 5s
retries: 3
travel-proxy:
build: ./travel-proxy
@@ -58,7 +73,7 @@ services:
restart: unless-stopped
user: "${PUID}:${PGID}"
ports:
- "19000:8000" # 내부 확인용
- "19000:8000"
environment:
- TZ=${TZ:-Asia/Seoul}
- TRAVEL_ROOT=${TRAVEL_ROOT:-/data/travel}
@@ -69,6 +84,11 @@ services:
volumes:
- ${PHOTO_PATH}:/data/travel:ro
- ${RUNTIME_PATH}/travel-thumbs:/data/thumbs:rw
healthcheck:
test: ["CMD", "python", "-c", "import urllib.request; urllib.request.urlopen('http://localhost:8000/health')"]
interval: 30s
timeout: 5s
retries: 3
frontend:
image: nginx:alpine
@@ -86,13 +106,18 @@ services:
- ${MUSIC_DATA_PATH:-./data/music}:/data/music:ro
extra_hosts:
- "host.docker.internal:host-gateway"
healthcheck:
test: ["CMD", "wget", "-q", "--spider", "http://localhost:80/"]
interval: 30s
timeout: 5s
retries: 3
deployer:
build: ./deployer
container_name: webpage-deployer
restart: unless-stopped
ports:
- "127.0.0.1:19010:9000" # localhost만 허용 (nginx /webhook 프록시 경유)
- "127.0.0.1:19010:9000"
environment:
- WEBHOOK_SECRET=${WEBHOOK_SECRET}
volumes:

View File

@@ -66,10 +66,22 @@ def is_market_open(d: date_type) -> bool:
return d.weekday() < 5 and d.strftime("%Y-%m-%d") not in _HOLIDAYS
def _calc_portfolio_totals(items, prices):
"""포트폴리오 총 매입/평가금 계산 (snapshot과 API에서 공용)"""
total_buy = 0
total_eval = 0
for item in items:
buy_amount = item["avg_price"] * item["quantity"]
current_price = prices.get(item["ticker"], item["avg_price"])
total_buy += buy_amount
total_eval += current_price * item["quantity"]
return total_buy, total_eval
def save_daily_snapshot():
today = date_type.today()
if not is_market_open(today):
print(f"[Snapshot] {today} 휴장일 — 스킵")
logger.info(f"Snapshot: {today} 휴장일 — 스킵")
return
today_str = today.strftime("%Y-%m-%d")
@@ -80,16 +92,13 @@ def save_daily_snapshot():
if items:
tickers = list({item["ticker"] for item in items})
prices = get_current_prices(tickers)
total_eval = sum(
prices.get(item["ticker"], item["avg_price"]) * item["quantity"]
for item in items
)
_, total_eval = _calc_portfolio_totals(items, prices)
else:
total_eval = 0
total_assets = total_eval + total_cash
upsert_asset_snapshot(today_str, total_eval, total_cash, total_assets)
print(f"[Snapshot] {today_str} 저장 완료: eval={total_eval}, cash={total_cash}, total={total_assets}")
logger.info(f"Snapshot: {today_str} 저장 eval={total_eval}, cash={total_cash}, total={total_assets}")
@app.on_event("startup")
def on_startup():
@@ -108,7 +117,7 @@ def on_startup():
scheduler.start()
def run_scraping_job():
print("[StockLab] Starting news scraping...")
logger.info("뉴스 스크래핑 시작")
# 1. 국내
articles_kr = fetch_market_news()
@@ -119,7 +128,7 @@ def run_scraping_job():
# count_world = save_articles(articles_world)
count_world = 0
print(f"[StockLab] Saved {count_kr} domestic, {count_world} overseas articles.")
logger.info(f"스크래핑 완료: 국내 {count_kr}건, 해외 {count_world}")
@app.get("/health")
def health():

View File

@@ -1,8 +1,11 @@
import logging
import requests
from bs4 import BeautifulSoup
from typing import List, Dict, Any
import time
logger = logging.getLogger("stock-lab.scraper")
# 네이버 파이낸스 주요 뉴스
NAVER_FINANCE_NEWS_URL = "https://finance.naver.com/news/mainnews.naver"
# 해외증시 뉴스 (모바일 API 사용)
@@ -73,7 +76,7 @@ def fetch_market_news() -> List[Dict[str, str]]:
return articles
except Exception as e:
print(f"[StockLab] Scraping failed: {e}")
logger.error(f"국내 뉴스 스크래핑 실패: {e}")
return []
def fetch_overseas_news() -> List[Dict[str, str]]:
@@ -126,7 +129,7 @@ def fetch_overseas_news() -> List[Dict[str, str]]:
return articles
except Exception as e:
print(f"[StockLab] Overseas news failed: {e}")
logger.error(f"해외 뉴스 스크래핑 실패: {e}")
return []
def fetch_major_indices() -> Dict[str, Any]:
@@ -237,7 +240,7 @@ def fetch_major_indices() -> Dict[str, Any]:
})
except Exception as e:
print(f"[StockLab] World indices failed: {e}")
logger.error(f"해외 지수 스크래핑 실패: {e}")
# --- 환율 (USD/KRW) ---
try:
@@ -269,10 +272,10 @@ def fetch_major_indices() -> Dict[str, Any]:
"type": "exchange"
})
except Exception as e:
print(f"[StockLab] Exchange rate failed: {e}")
logger.error(f"환율 스크래핑 실패: {e}")
return {"indices": indices, "crawled_at": time.strftime("%Y-%m-%d %H:%M:%S")}
except Exception as e:
print(f"[StockLab] Indices scraping failed: {e}")
logger.error(f"지수 스크래핑 전체 실패: {e}")
return {"indices": [], "error": str(e)}