feat(music-lab): 다중 트랙 컴파일 백엔드 (FFmpeg concat+crossfade → MP4)
- db.py: compile_jobs 테이블 추가 + CRUD 5종 (create/get/list/update/delete)
- compiler.py: acrossfade 필터 체인 + 그라디언트 배경 + MP4 렌더링 워커
- main.py: /api/music/compile POST·GET·DELETE + /api/music/compiles GET + /api/music/compile/{id}/export GET
Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
This commit is contained in:
121
music-lab/app/compiler.py
Normal file
121
music-lab/app/compiler.py
Normal file
@@ -0,0 +1,121 @@
|
|||||||
|
import os
|
||||||
|
import subprocess
|
||||||
|
|
||||||
|
from .db import get_compile_job, get_track_file_path, get_track_by_id, update_compile_job
|
||||||
|
|
||||||
|
VIDEO_DATA_DIR = os.getenv("VIDEO_DATA_DIR", "/app/data/videos")
|
||||||
|
|
||||||
|
|
||||||
|
def _build_concat_cmd(file_paths: list[str], crossfade_sec: float, output_path: str) -> list:
|
||||||
|
"""FFmpeg command: N audio files → single audio with acrossfade."""
|
||||||
|
n = len(file_paths)
|
||||||
|
if n == 1:
|
||||||
|
# Single track: just copy
|
||||||
|
return ["ffmpeg", "-y", "-i", file_paths[0], "-c:a", "libmp3lame", "-q:a", "2", output_path]
|
||||||
|
|
||||||
|
cmd = []
|
||||||
|
for fp in file_paths:
|
||||||
|
cmd += ["-i", fp]
|
||||||
|
|
||||||
|
# Build acrossfade filter chain
|
||||||
|
filter_parts = []
|
||||||
|
prev = "0"
|
||||||
|
for i in range(1, n):
|
||||||
|
out_label = f"a{i:02d}"
|
||||||
|
filter_parts.append(f"[{prev}][{i}]acrossfade=d={crossfade_sec}:c1=tri:c2=tri[{out_label}]")
|
||||||
|
prev = out_label
|
||||||
|
|
||||||
|
filter_str = ";".join(filter_parts)
|
||||||
|
|
||||||
|
return (
|
||||||
|
["ffmpeg", "-y"]
|
||||||
|
+ cmd
|
||||||
|
+ ["-filter_complex", filter_str, "-map", f"[{prev}]", "-c:a", "libmp3lame", "-q:a", "2", output_path]
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def _make_gradient_bg(width: int, height: int, output_path: str) -> None:
    """Render a single dark background frame to *output_path* via FFmpeg.

    NOTE(review): despite the name, the lavfi ``color`` source emits a flat
    0x111827 frame, not an actual gradient — confirm whether a real gradient
    was intended.
    """
    cmd = [
        "ffmpeg", "-y",
        "-f", "lavfi",
        "-i", f"color=c=0x111827:size={width}x{height}:rate=1",
        "-frames:v", "1",
        output_path,
    ]
    subprocess.run(cmd, check=True, capture_output=True)
|
||||||
|
|
||||||
|
def _get_audio_duration(path: str) -> float:
|
||||||
|
"""Return duration in seconds via ffprobe."""
|
||||||
|
try:
|
||||||
|
result = subprocess.run(
|
||||||
|
["ffprobe", "-v", "error", "-show_entries", "format=duration",
|
||||||
|
"-of", "default=noprint_wrappers=1:nokey=1", path],
|
||||||
|
capture_output=True, text=True, check=True,
|
||||||
|
)
|
||||||
|
return float(result.stdout.strip())
|
||||||
|
except Exception:
|
||||||
|
return 0.0
|
||||||
|
|
||||||
|
|
||||||
|
def run_compile(job_id: int) -> None:
    """Main compile worker — called as a FastAPI BackgroundTask.

    Pipeline: load the job row → crossfade-concat the selected tracks into
    one MP3 → render a static background frame → mux both into an MP4.
    Moves the job's status 'rendering' → 'done' on success, or 'failed'
    with a short error message on any failure.
    """
    job = get_compile_job(job_id)
    if not job:
        # Job was deleted (or never existed) before the worker ran.
        return

    update_compile_job(job_id, status="rendering")

    try:
        # track_ids arrives already JSON-decoded by get_compile_job.
        track_ids = job["track_ids"]
        if not track_ids:
            raise ValueError("트랙이 선택되지 않았습니다")

        # Resolve file paths; fail fast if any track file is missing on disk.
        file_paths = []
        for tid in track_ids:
            fp = get_track_file_path(tid)
            if not fp or not os.path.exists(fp):
                raise ValueError(f"트랙 파일 없음 (id={tid})")
            file_paths.append(fp)

        # All artifacts for this job live under compile_<id>/ so the delete
        # endpoint can remove them with a single rmtree.
        out_dir = os.path.join(VIDEO_DATA_DIR, f"compile_{job_id}")
        os.makedirs(out_dir, exist_ok=True)

        # Step 1: concat audio with acrossfade.
        audio_path = os.path.join(out_dir, "audio.mp3")
        concat_cmd = _build_concat_cmd(file_paths, job["crossfade_sec"], audio_path)
        subprocess.run(concat_cmd, check=True, capture_output=True)

        # Probe the merged file (crossfades make the total shorter than the sum).
        duration = _get_audio_duration(audio_path)

        # Step 2: background image (single 1080p frame).
        bg_path = os.path.join(out_dir, "bg.jpg")
        _make_gradient_bg(1920, 1080, bg_path)

        # Step 3: loop the still image + audio → MP4; -shortest ends the
        # video when the audio does.
        output_path = os.path.join(out_dir, "output.mp4")
        subprocess.run(
            [
                "ffmpeg", "-y",
                "-loop", "1", "-i", bg_path,
                "-i", audio_path,
                "-c:v", "libx264", "-tune", "stillimage", "-preset", "fast",
                "-c:a", "aac", "-b:a", "192k",
                "-pix_fmt", "yuv420p",
                "-shortest",
                output_path,
            ],
            check=True, capture_output=True,
        )

        update_compile_job(job_id, status="done", output_path=output_path, duration_sec=duration)

    except subprocess.CalledProcessError as e:
        # capture_output=True yields bytes; keep only the stderr tail so the
        # error column stays small.
        err = (e.stderr or b"").decode()[-300:]
        update_compile_job(job_id, status="failed", error=err)
    except Exception as e:
        update_compile_job(job_id, status="failed", error=str(e))
||||||
@@ -169,6 +169,21 @@ def init_db() -> None:
|
|||||||
)
|
)
|
||||||
""")
|
""")
|
||||||
|
|
||||||
|
    # ── compile_jobs table ────────────────────────────────────────────
    # One row per multi-track compilation request. The background worker
    # moves `status` through pending → rendering → done/failed;
    # track_ids is stored as a JSON array in TEXT.
    conn.execute("""
        CREATE TABLE IF NOT EXISTS compile_jobs (
            id INTEGER PRIMARY KEY AUTOINCREMENT,
            title TEXT NOT NULL DEFAULT '',
            track_ids TEXT NOT NULL DEFAULT '[]',
            crossfade_sec REAL NOT NULL DEFAULT 3.0,
            status TEXT NOT NULL DEFAULT 'pending',
            output_path TEXT,
            duration_sec REAL,
            error TEXT,
            created_at TEXT NOT NULL DEFAULT (strftime('%Y-%m-%dT%H:%M:%fZ','now'))
        )
    """)
||||||
|
|
||||||
|
|
||||||
# ── music_tasks CRUD ──────────────────────────────────────────────────────────
|
# ── music_tasks CRUD ──────────────────────────────────────────────────────────
|
||||||
|
|
||||||
@@ -707,3 +722,72 @@ def get_trend_reports(limit: int = 10) -> list:
|
|||||||
}
|
}
|
||||||
for r in rows
|
for r in rows
|
||||||
]
|
]
|
||||||
|
|
||||||
|
|
||||||
|
# ── Compile Jobs ─────────────────────────────────────────
|
||||||
|
|
||||||
|
def create_compile_job(title: str, track_ids: list, crossfade_sec: float) -> int:
    """Insert a new compile job (status defaults to 'pending') and return its id."""
    row = (title, json.dumps(track_ids), crossfade_sec)
    with _conn() as conn:
        cursor = conn.execute(
            "INSERT INTO compile_jobs (title, track_ids, crossfade_sec) VALUES (?,?,?)",
            row,
        )
        return cursor.lastrowid
|
|
||||||
|
|
||||||
|
def get_compile_jobs() -> list:
    """Return up to 50 most recent compile jobs as summary dicts.

    Summary only: output_path and error are intentionally omitted.
    """
    query = (
        "SELECT id, title, track_ids, crossfade_sec, status, duration_sec, created_at "
        "FROM compile_jobs ORDER BY created_at DESC LIMIT 50"
    )
    with _conn() as conn:
        rows = conn.execute(query).fetchall()
    jobs = []
    for row in rows:
        jobs.append({
            "id": row["id"],
            "title": row["title"],
            "track_ids": json.loads(row["track_ids"]),
            "crossfade_sec": row["crossfade_sec"],
            "status": row["status"],
            "duration_sec": row["duration_sec"],
            "created_at": row["created_at"],
        })
    return jobs
|
|
||||||
|
|
||||||
|
def get_compile_job(job_id: int) -> Optional[Dict[str, Any]]:
    """Fetch one compile job by id; None when no such row exists.

    track_ids is decoded from its JSON TEXT column into a Python list.
    """
    with _conn() as conn:
        row = conn.execute(
            "SELECT * FROM compile_jobs WHERE id = ?", (job_id,)
        ).fetchone()
    if row is None:
        return None
    # SELECT * yields columns in table order (id, title, track_ids, ...),
    # so copying by row.keys() preserves the response key order.
    job = {key: row[key] for key in row.keys()}
    job["track_ids"] = json.loads(job["track_ids"])
    return job
|
|
||||||
|
|
||||||
|
def update_compile_job(job_id: int, **kwargs) -> None:
    """Partially update a compile job row.

    Only whitelisted columns are applied (status, output_path, duration_sec,
    error); unknown kwargs are silently ignored, and a call with no valid
    fields is a no-op. The whitelist keeps the formatted SET clause safe.
    """
    allowed = {"status", "output_path", "duration_sec", "error"}
    updates = [(key, value) for key, value in kwargs.items() if key in allowed]
    if not updates:
        return
    assignments = ", ".join(f"{col} = ?" for col, _ in updates)
    params = [value for _, value in updates] + [job_id]
    with _conn() as conn:
        conn.execute(f"UPDATE compile_jobs SET {assignments} WHERE id = ?", params)
|
|
||||||
|
|
||||||
|
def delete_compile_job(job_id: int) -> None:
    """Remove a compile job row; a no-op when the id does not exist."""
    sql = "DELETE FROM compile_jobs WHERE id = ?"
    with _conn() as conn:
        conn.execute(sql, (job_id,))
|
|||||||
@@ -19,7 +19,10 @@ from .db import (
|
|||||||
update_revenue_record, delete_revenue_record, get_revenue_dashboard,
|
update_revenue_record, delete_revenue_record, get_revenue_dashboard,
|
||||||
get_market_trends as _get_market_trends,
|
get_market_trends as _get_market_trends,
|
||||||
get_latest_trend_report, get_trend_reports as _get_trend_reports,
|
get_latest_trend_report, get_trend_reports as _get_trend_reports,
|
||||||
|
create_compile_job, get_compile_jobs, get_compile_job,
|
||||||
|
update_compile_job, delete_compile_job,
|
||||||
)
|
)
|
||||||
|
from .compiler import run_compile
|
||||||
from .market import ingest_trends, get_suggestions
|
from .market import ingest_trends, get_suggestions
|
||||||
from .local_provider import run_local_generation
|
from .local_provider import run_local_generation
|
||||||
from .suno_provider import (
|
from .suno_provider import (
|
||||||
@@ -783,6 +786,66 @@ def delete_project(project_id: int):
|
|||||||
return {"ok": True}
|
return {"ok": True}
|
||||||
|
|
||||||
|
|
||||||
|
# ── Compile Jobs ──────────────────────────────────────────────────────────────
|
||||||
|
|
||||||
|
class CompileRequest(BaseModel):
    """Request body for POST /api/music/compile."""
    # Optional display title for the compilation.
    title: str = ""
    # Ordered track ids to concatenate (required; validated non-empty at the endpoint).
    track_ids: list[int]
    # Crossfade length in seconds; endpoint accepts 0.5–15.
    crossfade_sec: float = 3.0
|
||||||
|
|
||||||
|
@app.post("/api/music/compile")
|
||||||
|
def create_compile(req: CompileRequest, background_tasks: BackgroundTasks):
|
||||||
|
if not req.track_ids:
|
||||||
|
raise HTTPException(status_code=400, detail="track_ids 필수")
|
||||||
|
if not (0.5 <= req.crossfade_sec <= 15):
|
||||||
|
raise HTTPException(status_code=400, detail="crossfade_sec: 0.5~15")
|
||||||
|
job_id = create_compile_job(req.title, req.track_ids, req.crossfade_sec)
|
||||||
|
background_tasks.add_task(run_compile, job_id)
|
||||||
|
return {"id": job_id, "status": "rendering"}
|
||||||
|
|
||||||
|
|
||||||
|
@app.get("/api/music/compiles")
|
||||||
|
def list_compiles():
|
||||||
|
return {"jobs": get_compile_jobs()}
|
||||||
|
|
||||||
|
|
||||||
|
@app.get("/api/music/compile/{job_id}")
|
||||||
|
def get_compile(job_id: int):
|
||||||
|
job = get_compile_job(job_id)
|
||||||
|
if not job:
|
||||||
|
raise HTTPException(status_code=404, detail="Not found")
|
||||||
|
return job
|
||||||
|
|
||||||
|
|
||||||
|
@app.delete("/api/music/compile/{job_id}")
def delete_compile(job_id: int):
    """Delete a compile job row and its rendered output directory on disk."""
    job = get_compile_job(job_id)
    if job is None:
        raise HTTPException(status_code=404, detail="Not found")
    output_path = job.get("output_path")
    if output_path:
        # Remove the whole compile_<id> directory, not just the mp4.
        out_dir = os.path.dirname(output_path)
        if os.path.exists(out_dir):
            shutil.rmtree(out_dir, ignore_errors=True)
    delete_compile_job(job_id)
    return {"ok": True}
|
||||||
|
|
||||||
|
@app.get("/api/music/compile/{job_id}/export")
|
||||||
|
def export_compile(job_id: int):
|
||||||
|
job = get_compile_job(job_id)
|
||||||
|
if not job or job["status"] != "done":
|
||||||
|
raise HTTPException(status_code=404, detail="Not ready")
|
||||||
|
out_dir = os.path.dirname(job["output_path"])
|
||||||
|
rel = os.path.relpath(job["output_path"], os.getenv("VIDEO_DATA_DIR", "/app/data/videos"))
|
||||||
|
mp4_url = f"/media/videos/{rel}"
|
||||||
|
return {
|
||||||
|
"mp4_url": mp4_url,
|
||||||
|
"duration_sec": job["duration_sec"],
|
||||||
|
"title": job["title"],
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
# ── 수익화 추적 API ───────────────────────────────────────────────────────────
|
# ── 수익화 추적 API ───────────────────────────────────────────────────────────
|
||||||
|
|
||||||
@app.get("/api/music/revenue/dashboard")
|
@app.get("/api/music/revenue/dashboard")
|
||||||
|
|||||||
Reference in New Issue
Block a user