import os
import multiprocessing
from contextlib import asynccontextmanager

import uvicorn
from fastapi import FastAPI, Request
from pydantic import BaseModel

from modules.config import Config
from modules.services.ollama import OllamaManager
from modules.services.kis import KISClient
from modules.services.news import NewsCollector
from modules.services.telegram import TelegramMessenger
from modules.bot import AutoTradingBot
from modules.utils.process_tracker import ProcessTracker, ProcessWatchdog
from modules.services.telegram_bot.runner import run_telegram_bot_standalone

# Global service singletons. messenger is usable immediately; the others are
# created in lifespan() once the server starts.
messenger = TelegramMessenger()
ai_agent = None        # OllamaManager -- set in lifespan()
kis_client = None      # KISClient -- set in lifespan()
news_collector = None  # NewsCollector -- set in lifespan()
watchdog = None        # ProcessWatchdog -- set in lifespan()


def run_trading_bot(ipc_lock, command_queue, shutdown_event):
    """Entry point for the trading-bot child process.

    Registers the process with ProcessTracker (so zombie cleanup can find it)
    and runs the bot's main loop until shutdown_event is set.
    """
    ProcessTracker.register("Trading Bot Main")
    bot = AutoTradingBot(
        ipc_lock=ipc_lock,
        command_queue=command_queue,
        shutdown_event=shutdown_event,
    )
    bot.loop()


@asynccontextmanager
async def lifespan(app: FastAPI):
    """FastAPI lifespan: start child processes on startup, tear down on exit.

    Startup: validates config, clears zombie processes left by a previous
    run, builds the global service objects, then spawns the trading bot and
    the Telegram bot as separate processes watched by a ProcessWatchdog.
    Shutdown: signals the children, stops the watchdog, joins/terminates the
    children, and unlinks the shared-memory segment.
    """
    global ai_agent, kis_client, news_collector, watchdog

    # 1. Validate configuration before anything else.
    Config.validate()

    # 2. Best-effort cleanup of zombie processes from a previous run.
    #    Never block startup on this, but do surface the error.
    try:
        ProcessTracker.check_and_kill_zombies()
        ProcessTracker.clear()
        ProcessTracker.register("Main Server (Uvicorn Worker)")
    except Exception as exc:
        print(f"[Server] Zombie cleanup failed: {exc}")

    # 3. Initialize the global service objects used by the route handlers.
    ai_agent = OllamaManager()
    kis_client = KISClient()
    news_collector = NewsCollector()

    # 4. Shared IPC resources handed to both child processes.
    ipc_lock = multiprocessing.Lock()
    command_queue = multiprocessing.Queue()
    shutdown_event = multiprocessing.Event()

    print("[Server] Starting AI Trading Bot & Telegram Bot...")

    # 5. Spawn the child processes.
    bot_args = (ipc_lock, command_queue, shutdown_event)
    telegram_args = (ipc_lock, command_queue, shutdown_event)

    bot_process = multiprocessing.Process(
        target=run_trading_bot, args=bot_args)
    bot_process.start()

    telegram_process = multiprocessing.Process(
        target=run_telegram_bot_standalone, args=telegram_args)
    telegram_process.start()

    # 6. Start the watchdog that monitors (and can restart) the children.
    watchdog = ProcessWatchdog(shutdown_event=shutdown_event)
    watchdog.watch("Trading Bot", bot_process, run_trading_bot, bot_args)
    watchdog.watch("Telegram Bot", telegram_process,
                   run_telegram_bot_standalone, telegram_args)
    watchdog.start()

    messenger.send_message("[Server Started] Windows AI Server Online.")

    yield

    # [Shutdown]
    print("[Server] Shutting down...")
    shutdown_event.set()

    if watchdog:
        watchdog.stop()

    # Stop the child processes: graceful join first, then terminate.
    for name in ["Trading Bot", "Telegram Bot"]:
        proc = watchdog.get_process(name) if watchdog else None
        if proc and proc.is_alive():
            print(f" - Stopping {name}...")
            proc.join(timeout=5)
            if proc.is_alive():
                proc.terminate()
                proc.join(timeout=3)

    # Unlink the shared-memory segment (best-effort: it may not exist).
    try:
        from multiprocessing.shared_memory import SharedMemory
        shm = SharedMemory(name=Config.SHM_NAME, create=False)
        shm.close()
        shm.unlink()
    except Exception as exc:
        print(f"[Server] Shared memory cleanup skipped: {exc}")

    messenger.send_message("[Server Stopped] Server Shutting Down.")


app = FastAPI(title="Windows AI Stock Server", lifespan=lifespan)


@app.middleware("http")
async def log_requests(request: Request, call_next):
    """Log every incoming HTTP request (method + URL) before handling it."""
    print(f"[HTTP] {request.method} {request.url}")
    response = await call_next(request)
    return response


class ManualOrderRequest(BaseModel):
    # Request payload for the manual order endpoint.
    ticker: str    # stock ticker symbol
    action: str    # "BUY" or "SELL" (case-insensitive)
    quantity: int  # number of shares


@app.get("/")
def index():
    """Health-check endpoint; reports GPU VRAM usage read from the AI agent."""
    vram = ai_agent.check_vram() if ai_agent else 0
    return {
        "status": "online",
        "gpu_vram": round(vram, 2),
        "service": "Windows AI Server (Refactored)",
    }


@app.get("/trade/balance")
@app.get("/api/trade/balance")
async def get_balance():
    """Return the current KIS account balance."""
    if not kis_client:
        return {"error": "Server not initialized"}
    return kis_client.get_balance()


@app.post("/trade/order")
@app.post("/api/trade/order")
async def manual_order(req: ManualOrderRequest):
    """Execute a manual BUY/SELL order through the KIS client.

    Unknown actions fall through and report kis_result="No Action".
    """
    # Guard against calls before lifespan() built kis_client -- consistent
    # with get_balance(); previously this raised AttributeError on None.
    if not kis_client:
        return {"error": "Server not initialized"}

    action = req.action.upper()
    result = "No Action"
    if action == "BUY":
        result = kis_client.buy_stock(req.ticker, req.quantity)
    elif action == "SELL":
        result = kis_client.sell_stock(req.ticker, req.quantity)
    return {"status": "executed", "kis_result": result}
@app.post("/analyze/portfolio")
@app.post("/api/analyze/portfolio")
async def analyze_portfolio():
    """Analyze the current portfolio together with recent market news.

    Fetches the KIS balance and the latest market news, builds a prompt,
    and returns {"analysis": <AI response>} (response text is in Korean).
    """
    # Guard against calls before lifespan() built the service objects --
    # consistent with get_balance(); previously this raised AttributeError.
    if not (kis_client and news_collector and ai_agent):
        return {"error": "Server not initialized"}

    balance = kis_client.get_balance()
    news = news_collector.get_market_news()

    prompt = f"""
    Analyze this portfolio with recent news:
    Portfolio: {balance}
    News: {news}
    Response in Korean.
    """
    analysis = ai_agent.request_inference(prompt)
    return {"analysis": analysis}


if __name__ == "__main__":
    # Best-effort zombie cleanup before uvicorn spawns its worker(s);
    # log instead of silently swallowing failures.
    try:
        ProcessTracker.check_and_kill_zombies()
    except Exception as exc:
        print(f"[Server] Zombie cleanup failed: {exc}")

    print("[Server] Starting Windows AI Server...")
    uvicorn.run("main_server:app", host="0.0.0.0", port=8000, reload=False)