From 336bc90b4e545cf4a07c538e1e32223a54d72d65 Mon Sep 17 00:00:00 2001 From: gahusb Date: Tue, 7 Apr 2026 00:48:55 +0900 Subject: [PATCH] =?UTF-8?q?feat(blog-lab):=20=EB=A6=AC=EC=84=9C=EC=B9=98?= =?UTF-8?q?=20=EB=8B=A8=EA=B3=84=EC=97=90=20=EB=B8=94=EB=A1=9C=EA=B7=B8=20?= =?UTF-8?q?=EB=B3=B8=EB=AC=B8=20=ED=81=AC=EB=A1=A4=EB=A7=81=20=ED=86=B5?= =?UTF-8?q?=ED=95=A9?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-Authored-By: Claude Opus 4.6 --- blog-lab/app/main.py | 4 +- blog-lab/app/naver_search.py | 29 ++++++++++++ blog-lab/tests/test_research_crawling.py | 58 ++++++++++++++++++++++++ 3 files changed, 89 insertions(+), 2 deletions(-) create mode 100644 blog-lab/tests/test_research_crawling.py diff --git a/blog-lab/app/main.py b/blog-lab/app/main.py index c231827..b5b019b 100644 --- a/blog-lab/app/main.py +++ b/blog-lab/app/main.py @@ -16,7 +16,7 @@ from .db import ( get_dashboard_stats, get_task, create_task, update_task, ) -from .naver_search import analyze_keyword +from .naver_search import analyze_keyword_with_crawling from .content_generator import generate_trend_brief, generate_blog_post, regenerate_blog_post from .quality_reviewer import review_post @@ -65,7 +65,7 @@ def _run_research(task_id: str, keyword: str): """BackgroundTask: 네이버 검색 → 키워드 분석 → DB 저장.""" try: update_task(task_id, "processing", 30, "네이버 검색 중...") - result = analyze_keyword(keyword) + result = analyze_keyword_with_crawling(keyword) update_task(task_id, "processing", 80, "분석 결과 저장 중...") saved = add_keyword_analysis(result) diff --git a/blog-lab/app/naver_search.py b/blog-lab/app/naver_search.py index bc83a08..37b9969 100644 --- a/blog-lab/app/naver_search.py +++ b/blog-lab/app/naver_search.py @@ -1,9 +1,13 @@ """네이버 검색 API 연동 — 블로그 + 쇼핑 검색.""" +import asyncio +import logging import re import requests from typing import Any, Dict, List, Optional +logger = logging.getLogger(__name__) + from .config import NAVER_CLIENT_ID, NAVER_CLIENT_SECRET 
BLOG_URL = "https://openapi.naver.com/v1/search/blog.json" @@ -172,3 +176,28 @@ def analyze_keyword(keyword: str) -> Dict[str, Any]: "top_products": shop["items"][:5], "top_blogs": blog["items"][:5], } + + +def _run_enrich(top_blogs: list) -> list: + """동기 컨텍스트에서 비동기 enrich_top_blogs 실행.""" + from .web_crawler import enrich_top_blogs + try: + loop = asyncio.get_event_loop() + if loop.is_running(): + import concurrent.futures + with concurrent.futures.ThreadPoolExecutor() as pool: + return pool.submit( + asyncio.run, enrich_top_blogs(top_blogs) + ).result(timeout=60) + else: + return asyncio.run(enrich_top_blogs(top_blogs)) + except Exception as e: + logger.warning("블로그 크롤링 실패, 기존 데이터 사용: %s", e) + return top_blogs + + +def analyze_keyword_with_crawling(keyword: str) -> Dict[str, Any]: + """analyze_keyword + 상위 블로그 본문 크롤링.""" + result = analyze_keyword(keyword) + result["top_blogs"] = _run_enrich(result["top_blogs"]) + return result diff --git a/blog-lab/tests/test_research_crawling.py b/blog-lab/tests/test_research_crawling.py new file mode 100644 index 0000000..598eb4c --- /dev/null +++ b/blog-lab/tests/test_research_crawling.py @@ -0,0 +1,58 @@ +"""리서치 단계 크롤링 통합 테스트.""" +from unittest.mock import patch + + +def test_analyze_keyword_with_crawling_enriches_top_blogs(): + """analyze_keyword_with_crawling가 top_blogs에 content 필드를 추가.""" + from app.naver_search import analyze_keyword_with_crawling + + mock_blog_result = { + "total": 100, + "items": [ + {"title": "테스트 블로그", "link": "https://blog.naver.com/user1/111", + "bloggername": "유저1", "description": "설명", "postdate": "20260401"}, + ], + } + mock_shop_result = { + "total": 50, + "items": [{"title": "상품1", "lprice": 10000, "mallName": "쿠팡"}], + "price_stats": {"min": 10000, "max": 10000, "avg": 10000, "count": 1}, + } + + with patch("app.naver_search.search_blog", return_value=mock_blog_result), \ + patch("app.naver_search.search_shopping", return_value=mock_shop_result), \ + patch("app.naver_search._run_enrich", 
def test_analyze_keyword_with_crawling_enriches_top_blogs():
    """analyze_keyword_with_crawling가 top_blogs에 content 필드를 추가."""
    from app.naver_search import analyze_keyword_with_crawling

    mock_blog_result = {
        "total": 100,
        "items": [
            {"title": "테스트 블로그", "link": "https://blog.naver.com/user1/111",
             "bloggername": "유저1", "description": "설명", "postdate": "20260401"},
        ],
    }
    mock_shop_result = {
        "total": 50,
        "items": [{"title": "상품1", "lprice": 10000, "mallName": "쿠팡"}],
        "price_stats": {"min": 10000, "max": 10000, "avg": 10000, "count": 1},
    }
    enriched = [
        {"title": "테스트 블로그", "link": "https://blog.naver.com/user1/111",
         "bloggername": "유저1", "description": "설명", "postdate": "20260401",
         "content": "크롤링된 본문 내용"},
    ]

    with patch("app.naver_search.search_blog", return_value=mock_blog_result), \
         patch("app.naver_search.search_shopping", return_value=mock_shop_result), \
         patch("app.naver_search._run_enrich", return_value=enriched):
        result = analyze_keyword_with_crawling("테스트 키워드")

    assert "content" in result["top_blogs"][0]
    assert result["top_blogs"][0]["content"] == "크롤링된 본문 내용"


def test_analyze_keyword_with_crawling_fallback_on_enrich_failure():
    """크롤링 실패 시 기존 데이터 유지."""
    from app.naver_search import _run_enrich

    # The fallback lives inside _run_enrich (it catches crawler failures
    # itself), so exercise that path directly: the underlying async helper
    # raises and _run_enrich must return the input unchanged.
    original_blogs = [{"title": "원본", "link": "https://blog.naver.com/u/1"}]
    with patch("app.web_crawler.enrich_top_blogs", side_effect=Exception("fail")):
        result = _run_enrich(original_blogs)

    assert result == original_blogs  # fallback returns the original data