feat(blog-lab): 리서치 단계에 블로그 본문 크롤링 통합

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
2026-04-07 00:48:55 +09:00
parent 2980807587
commit 336bc90b4e
3 changed files with 89 additions and 2 deletions

View File

@@ -16,7 +16,7 @@ from .db import (
get_dashboard_stats, get_dashboard_stats,
get_task, create_task, update_task, get_task, create_task, update_task,
) )
from .naver_search import analyze_keyword from .naver_search import analyze_keyword_with_crawling
from .content_generator import generate_trend_brief, generate_blog_post, regenerate_blog_post from .content_generator import generate_trend_brief, generate_blog_post, regenerate_blog_post
from .quality_reviewer import review_post from .quality_reviewer import review_post
@@ -65,7 +65,7 @@ def _run_research(task_id: str, keyword: str):
"""BackgroundTask: 네이버 검색 → 키워드 분석 → DB 저장.""" """BackgroundTask: 네이버 검색 → 키워드 분석 → DB 저장."""
try: try:
update_task(task_id, "processing", 30, "네이버 검색 중...") update_task(task_id, "processing", 30, "네이버 검색 중...")
result = analyze_keyword(keyword) result = analyze_keyword_with_crawling(keyword)
update_task(task_id, "processing", 80, "분석 결과 저장 중...") update_task(task_id, "processing", 80, "분석 결과 저장 중...")
saved = add_keyword_analysis(result) saved = add_keyword_analysis(result)

View File

@@ -1,9 +1,13 @@
"""네이버 검색 API 연동 — 블로그 + 쇼핑 검색.""" """네이버 검색 API 연동 — 블로그 + 쇼핑 검색."""
import asyncio
import logging
import re import re
import requests import requests
from typing import Any, Dict, List, Optional from typing import Any, Dict, List, Optional
logger = logging.getLogger(__name__)
from .config import NAVER_CLIENT_ID, NAVER_CLIENT_SECRET from .config import NAVER_CLIENT_ID, NAVER_CLIENT_SECRET
BLOG_URL = "https://openapi.naver.com/v1/search/blog.json" BLOG_URL = "https://openapi.naver.com/v1/search/blog.json"
@@ -172,3 +176,28 @@ def analyze_keyword(keyword: str) -> Dict[str, Any]:
"top_products": shop["items"][:5], "top_products": shop["items"][:5],
"top_blogs": blog["items"][:5], "top_blogs": blog["items"][:5],
} }
def _run_enrich(top_blogs: list) -> list:
"""동기 컨텍스트에서 비동기 enrich_top_blogs 실행."""
from .web_crawler import enrich_top_blogs
try:
loop = asyncio.get_event_loop()
if loop.is_running():
import concurrent.futures
with concurrent.futures.ThreadPoolExecutor() as pool:
return pool.submit(
asyncio.run, enrich_top_blogs(top_blogs)
).result(timeout=60)
else:
return asyncio.run(enrich_top_blogs(top_blogs))
except Exception as e:
logger.warning("블로그 크롤링 실패, 기존 데이터 사용: %s", e)
return top_blogs
def analyze_keyword_with_crawling(keyword: str) -> Dict[str, Any]:
    """Keyword analysis enriched with crawled bodies of the top blog posts.

    Delegates to ``analyze_keyword`` for the base analysis, then swaps in
    the crawl-enriched ``top_blogs`` (which silently falls back to the
    originals when crawling fails).

    Args:
        keyword: search keyword to analyze.

    Returns:
        The ``analyze_keyword`` result dict with ``top_blogs`` enriched.
    """
    analysis = analyze_keyword(keyword)
    analysis["top_blogs"] = _run_enrich(analysis["top_blogs"])
    return analysis

View File

@@ -0,0 +1,58 @@
"""리서치 단계 크롤링 통합 테스트."""
from unittest.mock import patch
def test_analyze_keyword_with_crawling_enriches_top_blogs():
    """analyze_keyword_with_crawling adds a 'content' field to top_blogs."""
    from app.naver_search import analyze_keyword_with_crawling

    blog_response = {
        "total": 100,
        "items": [
            {"title": "테스트 블로그", "link": "https://blog.naver.com/user1/111",
             "bloggername": "유저1", "description": "설명", "postdate": "20260401"},
        ],
    }
    shop_response = {
        "total": 50,
        "items": [{"title": "상품1", "lprice": 10000, "mallName": "쿠팡"}],
        "price_stats": {"min": 10000, "max": 10000, "avg": 10000, "count": 1},
    }
    enriched_blogs = [
        {"title": "테스트 블로그", "link": "https://blog.naver.com/user1/111",
         "bloggername": "유저1", "description": "설명", "postdate": "20260401",
         "content": "크롤링된 본문 내용"}
    ]

    # Stub both Naver API calls and the crawl step itself; only the wiring
    # of analyze_keyword_with_crawling is under test here.
    with patch("app.naver_search.search_blog", return_value=blog_response):
        with patch("app.naver_search.search_shopping", return_value=shop_response):
            with patch("app.naver_search._run_enrich", return_value=enriched_blogs):
                result = analyze_keyword_with_crawling("테스트 키워드")

    top_blog = result["top_blogs"][0]
    assert "content" in top_blog
    assert top_blog["content"] == "크롤링된 본문 내용"
def test_analyze_keyword_with_crawling_fallback_on_enrich_failure():
    """_run_enrich falls back to the original blog list when crawling fails.

    The failure handling lives INSIDE _run_enrich (it catches the exception
    and returns its input untouched), so the fallback is exercised directly
    on _run_enrich rather than through analyze_keyword_with_crawling.
    """
    from app.naver_search import _run_enrich

    original_blogs = [{"title": "원본", "link": "https://blog.naver.com/u/1"}]
    # enrich_top_blogs is imported lazily inside _run_enrich, so patching it
    # at its source module is effective even after app.naver_search loaded.
    with patch("app.web_crawler.enrich_top_blogs", side_effect=Exception("fail")):
        result = _run_enrich(original_blogs)

    # On failure the input list is returned unchanged.
    assert result == original_blogs