feat: add stock-lab service for financial news scraping and analysis
stock-lab/app/db.py  (Normal file, 54 lines)
@@ -0,0 +1,54 @@
import sqlite3
import os
import hashlib
from typing import List, Dict, Any

DB_PATH = "/app/data/stock.db"

def _conn() -> sqlite3.Connection:
    os.makedirs(os.path.dirname(DB_PATH), exist_ok=True)
    conn = sqlite3.connect(DB_PATH)
    conn.row_factory = sqlite3.Row
    return conn

def init_db():
    with _conn() as conn:
        conn.execute("""
            CREATE TABLE IF NOT EXISTS articles (
                id INTEGER PRIMARY KEY AUTOINCREMENT,
                hash TEXT UNIQUE NOT NULL,
                title TEXT NOT NULL,
                link TEXT,
                summary TEXT,
                press TEXT,
                pub_date TEXT,
                crawled_at TEXT
            )
        """)
        conn.execute("CREATE INDEX IF NOT EXISTS idx_articles_crawled ON articles(crawled_at DESC)")

def save_articles(articles: List[Dict[str, str]]) -> int:
    count = 0
    with _conn() as conn:
        for a in articles:
            # Hash used for duplicate detection (title + link)
            unique_str = f"{a['title']}|{a['link']}"
            h = hashlib.md5(unique_str.encode()).hexdigest()

            try:
                conn.execute("""
                    INSERT INTO articles (hash, title, link, summary, press, pub_date, crawled_at)
                    VALUES (?, ?, ?, ?, ?, ?, ?)
                """, (h, a['title'], a['link'], a['summary'], a['press'], a['date'], a['crawled_at']))
                count += 1
            except sqlite3.IntegrityError:
                pass  # already exists
    return count

def get_latest_articles(limit: int = 20) -> List[Dict[str, Any]]:
    with _conn() as conn:
        rows = conn.execute(
            "SELECT * FROM articles ORDER BY crawled_at DESC, id DESC LIMIT ?",
            (limit,)
        ).fetchall()
        return [dict(r) for r in rows]
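
A minimal usage sketch for the module above (not part of this commit). It assumes the file is importable as app.db, that the container path /app/data is writable, and that the scraper hands save_articles() dicts with the keys the INSERT expects (title, link, summary, press, date, crawled_at); the sample values are illustrative only.

from datetime import datetime, timezone

from app.db import init_db, save_articles, get_latest_articles  # assumed import path

init_db()  # creates the articles table and index on first run

articles = [{
    "title": "Sample headline",
    "link": "https://example.com/article/1",
    "summary": "One-line summary of the article.",
    "press": "Example Press",
    "date": "2024-01-01",
    "crawled_at": datetime.now(timezone.utc).isoformat(),
}]

print(save_articles(articles))  # 1: new row inserted
print(save_articles(articles))  # 0: same title+link hashes to an existing row, so it is skipped

for row in get_latest_articles(limit=5):
    print(row["crawled_at"], row["press"], row["title"])

Because the hash column is UNIQUE and IntegrityError is swallowed, save_articles() is effectively idempotent, so the scraper can be re-run over overlapping pages without creating duplicate rows.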
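One detail of the with _conn() as conn: blocks worth noting: sqlite3.Connection used as a context manager commits (or rolls back) the transaction but does not close the connection, so each call leaves its connection for garbage collection. If explicit closing were wanted, a variant along these lines (a sketch reusing _conn() from the file above, not what this commit does) wraps the connection in contextlib.closing:

from contextlib import closing

def get_latest_articles_closing(limit: int = 20):
    # closing() guarantees conn.close() even if the query raises
    with closing(_conn()) as conn:
        rows = conn.execute(
            "SELECT * FROM articles ORDER BY crawled_at DESC, id DESC LIMIT ?",
            (limit,),
        ).fetchall()
        return [dict(r) for r in rows]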