From 20c5268def688e83f579498214001347448f7ca8 Mon Sep 17 00:00:00 2001 From: gahusb Date: Sun, 10 May 2026 15:50:42 +0900 Subject: [PATCH] =?UTF-8?q?fix(music-lab):=20pipeline=20media=20URL?= =?UTF-8?q?=EC=97=90=20cache-buster=20=E2=80=94=20regen=20=EC=8B=9C=20?= =?UTF-8?q?=EB=B8=8C=EB=9D=BC=EC=9A=B0=EC=A0=80/=ED=85=94=EB=A0=88?= =?UTF-8?q?=EA=B7=B8=EB=9E=A8=20=EC=BA=90=EC=8B=9C=20=EC=9A=B0=ED=9A=8C?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- music-lab/app/db.py | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/music-lab/app/db.py b/music-lab/app/db.py index de3462d..744d463 100644 --- a/music-lab/app/db.py +++ b/music-lab/app/db.py @@ -1030,6 +1030,15 @@ def _parse_pipeline_row(row: sqlite3.Row) -> Dict[str, Any]: d["metadata"] = json.loads(d["metadata_json"]) if d.get("review_json"): d["review"] = json.loads(d["review_json"]) + + # Cache-bust media URLs — append ?v={updated_at_compact} so browsers/telegram fetch fresh after regen. NOTE(review): cache_key strips only ':', '-', 'T', '.' — a SQLite-style 'YYYY-MM-DD HH:MM:SS' updated_at keeps its space, yielding an invalid URL (confirm updated_at is ISO-'T' formatted); also, URLs that already carry a query string get no buster at all (would need '&v=') — confirm that skip is intended. + updated_at = d.get("updated_at", "") or "" + if updated_at: + cache_key = updated_at.replace(":", "").replace("-", "").replace("T", "").replace(".", "") + for url_key in ("cover_url", "video_url", "thumbnail_url"): + url = d.get(url_key) + if url and "?" not in url: + d[url_key] = f"{url}?v={cache_key}" return d