Files
web-page-backend/packs-lab/tests/test_routes.py
gahusb 1e5e1bcdff fix(packs-lab): sign-link path traversal — startswith → relative_to (CODE_REVIEW F1)
str(abs_path).startswith(str(PACK_HOST_DIR))는 trailing slash가 없어
sibling 경로(/foo/packs ↔ /foo/packs_evil)를 통과시켜 DSM API에 잘못된
호스트 경로를 전달할 수 있었음. Path.relative_to 기반으로 컴포넌트 단위
엄격 검증으로 교체. test_sign_link_rejects_sibling_path 회귀 테스트
추가 (RED → GREEN 검증).
2026-05-17 13:50:22 +09:00

559 lines
20 KiB
Python

"""routes.py 통합 테스트 (DSM, supabase는 mock)."""
import os
import time
import uuid
from datetime import datetime, timezone
from unittest.mock import AsyncMock, MagicMock, patch
import pytest
from fastapi.testclient import TestClient
# 테스트용 환경변수 (auth import 전)
os.environ["BACKEND_HMAC_SECRET"] = "test-secret-32-bytes-XXXXXXXXXXXX"
os.environ["DSM_HOST"] = "https://test.synology.me:5001"
os.environ["DSM_USER"] = "test"
os.environ["DSM_PASS"] = "test"
os.environ["SUPABASE_URL"] = "https://placeholder.supabase.co"
os.environ["SUPABASE_SERVICE_KEY"] = "placeholder-key"
from app import auth # noqa: E402
from app.main import app # noqa: E402
client = TestClient(app)
def _signed(body: bytes) -> dict:
    """Build HMAC auth headers (timestamp + signature over ts.body)."""
    timestamp = str(int(time.time()))
    signature = auth._sign(timestamp.encode() + b"." + body)
    return {
        "X-Timestamp": timestamp,
        "X-Signature": signature,
        "Content-Type": "application/json",
    }
def test_health():
    """GET /health responds 200 and identifies the service."""
    resp = client.get("/health")
    assert resp.status_code == 200
    assert resp.json()["service"] == "packs-lab"
@patch("app.routes.create_share_link", new_callable=AsyncMock)
def test_sign_link_success(mock_share):
mock_share.return_value = ("https://test.synology.me:5001/d/s/abc", datetime.now(timezone.utc))
# Windows에서는 절대경로 resolve 결과가 C:\... 로 prefix되므로 PACK_HOST_DIR도 동일하게 패치
# sign-link는 PACK_HOST_DIR(NAS 호스트경로) 기준으로 검증함.
from pathlib import Path
abs_resolved = Path("/volume1/docker/webpage/media/packs/master/x.mp4").resolve()
base_resolved = Path(str(abs_resolved).rsplit("master", 1)[0].rstrip("\\/"))
with patch("app.routes.PACK_HOST_DIR", base_resolved):
body = b'{"file_path":"/volume1/docker/webpage/media/packs/master/x.mp4","expires_in_seconds":14400}'
r = client.post("/api/packs/sign-link", content=body, headers=_signed(body))
assert r.status_code == 200
assert "url" in r.json()
def test_sign_link_no_hmac():
    """Missing HMAC headers -> 401."""
    resp = client.post("/api/packs/sign-link", json={"file_path": "/x"})
    assert resp.status_code == 401
def test_sign_link_path_outside_base():
    """A path outside PACK_HOST_DIR (e.g. /etc/passwd) -> 400."""
    payload = b'{"file_path":"/etc/passwd","expires_in_seconds":14400}'
    resp = client.post("/api/packs/sign-link", content=payload, headers=_signed(payload))
    assert resp.status_code == 400
def test_sign_link_rejects_sibling_path():
    """With PACK_HOST_DIR='/foo/packs', a sibling such as '/foo/packs_evil/x.mp4'
    that merely shares the string prefix must be rejected (CODE_REVIEW F1,
    path-traversal variant).

    The old str.startswith check had no trailing slash and let sibling paths
    through; only the Path.relative_to-based validation makes this pass.
    """
    import json
    from pathlib import Path

    base = Path("/foo/packs").resolve()
    # A sibling of base, not a child (e.g. /foo/packs_evil/...).
    evil_path = (base.parent / (base.name + "_evil") / "x.mp4").as_posix()
    with patch("app.routes.PACK_HOST_DIR", base):
        payload = json.dumps({"file_path": evil_path, "expires_in_seconds": 14400}).encode()
        resp = client.post("/api/packs/sign-link", content=payload, headers=_signed(payload))
        assert resp.status_code == 400, (
            f"sibling 경로 '{evil_path}'가 허용됨 (status={resp.status_code}) "
            f"— path traversal 가능성"
        )
def test_upload_invalid_token():
    """A bogus bearer token on upload -> 401."""
    resp = client.post(
        "/api/packs/upload",
        headers={"Authorization": "Bearer invalid"},
        files={"file": ("x.pdf", b"abc", "application/pdf")},
    )
    assert resp.status_code == 401
def test_upload_no_auth():
    """No Authorization header at all -> 401."""
    resp = client.post(
        "/api/packs/upload",
        files={"file": ("x.pdf", b"abc", "application/pdf")},
    )
    assert resp.status_code == 401
@patch("app.routes._supabase")
def test_list_success(mock_sb):
mock_table = MagicMock()
mock_table.select.return_value = mock_table
mock_table.is_.return_value = mock_table
mock_table.order.return_value = mock_table
mock_table.execute.return_value = MagicMock(data=[
{
"id": str(uuid.uuid4()),
"min_tier": "starter",
"label": "테스트",
"file_path": "/volume1/.../x.pdf",
"filename": "x.pdf",
"size_bytes": 100,
"sort_order": 0,
"uploaded_at": "2026-05-02T12:00:00+00:00",
}
])
mock_sb.return_value.table.return_value = mock_table
body = b''
r = client.get("/api/packs/list", headers=_signed(body))
assert r.status_code == 200
assert len(r.json()) == 1
def test_mint_token_hmac_required():
    """mint-token without HMAC headers -> 401."""
    payload = {"tier": "pro", "label": "샘플", "filename": "x.zip", "size_bytes": 1024}
    resp = client.post("/api/packs/admin/mint-token", json=payload)
    assert resp.status_code == 401
def test_mint_token_returns_valid_token():
    """A minted token must round-trip through verify_upload_token."""
    import json
    from app.auth import verify_upload_token

    claims = {"tier": "pro", "label": "샘플", "filename": "test.zip", "size_bytes": 2048}
    raw = json.dumps(claims).encode()
    resp = client.post("/api/packs/admin/mint-token", content=raw, headers=_signed(raw))
    assert resp.status_code == 200
    data = resp.json()
    assert "token" in data and "expires_at" in data and "jti" in data
    decoded = verify_upload_token(data["token"])
    assert decoded["tier"] == "pro"
    assert decoded["label"] == "샘플"
    assert decoded["filename"] == "test.zip"
    assert decoded["size_bytes"] == 2048
    assert decoded["jti"] == data["jti"]
def test_mint_token_invalid_filename():
    """Disallowed extension (.exe) -> 400."""
    import json

    claims = {"tier": "pro", "label": "샘플", "filename": "x.exe", "size_bytes": 1024}
    raw = json.dumps(claims).encode()
    resp = client.post("/api/packs/admin/mint-token", content=raw, headers=_signed(raw))
    assert resp.status_code == 400
def test_upload_size_mismatch(tmp_path, monkeypatch):
    """Token size_bytes != actual file size -> 400 and the partial file is removed."""
    monkeypatch.setattr("app.routes.PACK_BASE_DIR", tmp_path)
    token = auth.mint_upload_token({
        "tier": "pro",
        "label": "샘플",
        "filename": "size_mismatch_test.zip",
        "size_bytes": 999,
        "jti": str(uuid.uuid4()),
        "expires_at": int(time.time()) + 1800,
    })
    resp = TestClient(app).post(
        "/api/packs/upload",
        files={"file": ("size_mismatch_test.zip", b"hello")},
        headers={"Authorization": f"Bearer {token}"},
    )
    assert resp.status_code == 400
    assert "크기" in resp.json()["detail"]
    # The file must have been cleaned up (flat layout: directly under base).
    assert not (tmp_path / "size_mismatch_test.zip").exists()
def test_upload_jti_replay(tmp_path, monkeypatch):
    """The same jti token used twice -> second attempt 409."""
    monkeypatch.setattr("app.routes.PACK_BASE_DIR", tmp_path)
    sb = MagicMock()
    sb.table.return_value.insert.return_value.execute.return_value = MagicMock(
        data=[{"uploaded_at": "2026-05-05T12:00:00+00:00"}]
    )
    replay_jti = f"replay-jti-unique-{uuid.uuid4()}"
    token = auth.mint_upload_token({
        "tier": "pro",
        "label": "샘플",
        "filename": "replay_test.zip",
        "size_bytes": 5,
        "jti": replay_jti,
        "expires_at": int(time.time()) + 1800,
    })
    auth_headers = {"Authorization": f"Bearer {token}"}
    with patch("app.routes._supabase", return_value=sb):
        tc = TestClient(app)
        first = tc.post(
            "/api/packs/upload",
            files={"file": ("replay_test.zip", b"hello")},
            headers=auth_headers,
        )
        assert first.status_code == 200
        # Second call replays the identical token -> 409.
        second = tc.post(
            "/api/packs/upload",
            files={"file": ("replay_test.zip", b"world")},
            headers=auth_headers,
        )
        assert second.status_code == 409
def test_delete_soft_deletes():
    """DELETE must write an ISO deleted_at timestamp via supabase update."""
    sb = MagicMock()
    sb.table.return_value.update.return_value.eq.return_value.execute.return_value = MagicMock(
        data=[{"id": "abc"}]
    )
    headers = _signed(b"")
    with patch("app.routes._supabase", return_value=sb):
        resp = TestClient(app).delete("/api/packs/abc", headers=headers)
        assert resp.status_code == 200
        update_payload = sb.table.return_value.update.call_args.args[0]
        assert "deleted_at" in update_payload
        assert "T" in update_payload["deleted_at"]  # ISO 8601
def test_list_filters_deleted():
    """The list route must apply an is_(deleted_at, null) supabase filter."""
    rows = [{
        "id": "11111111-1111-1111-1111-111111111111",
        "min_tier": "pro", "label": "샘플",
        "file_path": "/volume1/docker/webpage/media/packs/pro/a.zip",
        "filename": "a.zip", "size_bytes": 1024, "sort_order": 0,
        "uploaded_at": "2026-05-05T12:00:00+00:00",
    }]
    sb = MagicMock()
    select_chain = sb.table.return_value.select.return_value
    select_chain.is_.return_value.order.return_value.order.return_value.execute.return_value = MagicMock(data=rows)
    headers = _signed(b"")
    with patch("app.routes._supabase", return_value=sb):
        resp = TestClient(app).get("/api/packs/list", headers=headers)
        assert resp.status_code == 200
        select_chain.is_.assert_called_with("deleted_at", "null")
def _mint(filename: str, size: int, jti: str | None = None) -> str:
    """Mint a pro-tier upload token for *filename*/*size* (fresh random jti unless given)."""
    return auth.mint_upload_token({
        "tier": "pro",
        "label": "샘플",
        "filename": filename,
        "size_bytes": size,
        "jti": jti or str(uuid.uuid4()),
        # 30-minute expiry — comfortably longer than any single test run.
        "expires_at": int(time.time()) + 1800,
    })
def test_chunk_upload_full_flow(tmp_path, monkeypatch):
    """Chunked-upload happy path: init -> chunk(0) -> chunk(N) -> complete."""
    monkeypatch.setattr("app.routes.PACK_BASE_DIR", tmp_path)
    from pathlib import Path
    monkeypatch.setattr("app.routes.PACK_HOST_DIR", Path("/volume1/host"))
    fake_supabase = MagicMock()
    fake_supabase.table.return_value.insert.return_value.execute.return_value = MagicMock(
        data=[{"uploaded_at": "2026-05-12T00:00:00+00:00"}]
    )
    payload = b"a" * 100 + b"b" * 50  # 150 bytes total
    chunk1 = payload[:100]
    chunk2 = payload[100:]
    jti = str(uuid.uuid4())
    token = _mint("chunk_full.zip", len(payload), jti=jti)
    headers = {"Authorization": f"Bearer {token}"}
    with patch("app.routes._supabase", return_value=fake_supabase):
        test_client = TestClient(app)
        # init — the session id is expected to equal the token's jti
        r = test_client.post("/api/packs/upload/init", headers=headers)
        assert r.status_code == 200, r.text
        sid = r.json()["session_id"]
        assert sid == jti
        assert r.json()["expected_size"] == 150
        # chunk 1 (offset=0)
        r = test_client.put(
            f"/api/packs/upload/{sid}/chunk?offset=0",
            content=chunk1,
            headers=headers,
        )
        assert r.status_code == 200, r.text
        assert r.json()["written"] == 100
        # chunk 2 (offset=100)
        r = test_client.put(
            f"/api/packs/upload/{sid}/chunk?offset=100",
            content=chunk2,
            headers=headers,
        )
        assert r.status_code == 200
        assert r.json()["written"] == 150
        # complete
        r = test_client.post(f"/api/packs/upload/{sid}/complete", headers=headers)
        assert r.status_code == 200, r.text
        body = r.json()
        assert body["filename"] == "chunk_full.zip"
        assert body["size_bytes"] == 150
        # NOTE(review): the endswith() fallback tolerates platform separators but
        # makes this assertion nearly vacuous — consider tightening.
        assert body["file_path"] == "/volume1/host/chunk_full.zip" or body["file_path"].endswith("chunk_full.zip")
        # The file moved to its final location and the session dir was cleaned up.
        assert (tmp_path / "chunk_full.zip").read_bytes() == payload
        assert not (tmp_path / ".uploads" / sid).exists()
def test_chunk_upload_offset_mismatch(tmp_path, monkeypatch):
    """A wrong chunk offset -> 409 plus an X-Current-Offset header."""
    monkeypatch.setattr("app.routes.PACK_BASE_DIR", tmp_path)
    session_jti = str(uuid.uuid4())
    token = _mint("offset_mismatch.zip", 100, jti=session_jti)
    headers = {"Authorization": f"Bearer {token}"}
    tc = TestClient(app)
    init_resp = tc.post("/api/packs/upload/init", headers=headers)
    assert init_resp.status_code == 200
    sid = init_resp.json()["session_id"]
    # Send offset 10 while the server expects 0.
    chunk_resp = tc.put(
        f"/api/packs/upload/{sid}/chunk?offset=10",
        content=b"x" * 10,
        headers=headers,
    )
    assert chunk_resp.status_code == 409
    assert chunk_resp.headers.get("X-Current-Offset") == "0"
def test_chunk_upload_status(tmp_path, monkeypatch):
    """status reports the number of bytes written so far."""
    monkeypatch.setattr("app.routes.PACK_BASE_DIR", tmp_path)
    token = _mint("status_check.zip", 50, jti=str(uuid.uuid4()))
    headers = {"Authorization": f"Bearer {token}"}
    tc = TestClient(app)
    sid = tc.post("/api/packs/upload/init", headers=headers).json()["session_id"]
    # Nothing uploaded yet.
    status = tc.get(f"/api/packs/upload/{sid}/status", headers=headers)
    assert status.status_code == 200
    assert status.json()["written"] == 0
    assert status.json()["expected_size"] == 50
    # After a partial chunk.
    tc.put(
        f"/api/packs/upload/{sid}/chunk?offset=0",
        content=b"x" * 20,
        headers=headers,
    )
    status = tc.get(f"/api/packs/upload/{sid}/status", headers=headers)
    assert status.json()["written"] == 20
def test_chunk_upload_abort(tmp_path, monkeypatch):
    """DELETE on an upload session removes its session directory."""
    monkeypatch.setattr("app.routes.PACK_BASE_DIR", tmp_path)
    session_id = str(uuid.uuid4())
    headers = {"Authorization": f"Bearer {_mint('abort_test.zip', 30, jti=session_id)}"}
    tc = TestClient(app)
    tc.post("/api/packs/upload/init", headers=headers)
    tc.put(
        f"/api/packs/upload/{session_id}/chunk?offset=0",
        content=b"y" * 10,
        headers=headers,
    )
    session_dir = tmp_path / ".uploads" / session_id
    assert session_dir.exists()
    resp = tc.delete(f"/api/packs/upload/{session_id}", headers=headers)
    assert resp.status_code == 200
    assert not session_dir.exists()
def test_chunk_upload_wrong_token(tmp_path, monkeypatch):
    """Calling session A's chunk endpoint with session B's token -> 403."""
    monkeypatch.setattr("app.routes.PACK_BASE_DIR", tmp_path)
    tc = TestClient(app)
    # Open session A.
    jti_a = str(uuid.uuid4())
    headers_a = {"Authorization": f"Bearer {_mint('wrong_token_a.zip', 30, jti=jti_a)}"}
    tc.post("/api/packs/upload/init", headers=headers_a)
    # Hit session A's chunk endpoint with a token minted for a different jti.
    jti_b = str(uuid.uuid4())
    headers_b = {"Authorization": f"Bearer {_mint('wrong_token_b.zip', 30, jti=jti_b)}"}
    resp = tc.put(
        f"/api/packs/upload/{jti_a}/chunk?offset=0",
        content=b"z" * 10,
        headers=headers_b,
    )
    assert resp.status_code == 403
def test_chunk_upload_complete_incomplete(tmp_path, monkeypatch):
    """complete before expected_size is reached -> 400."""
    monkeypatch.setattr("app.routes.PACK_BASE_DIR", tmp_path)
    sid = str(uuid.uuid4())
    headers = {"Authorization": f"Bearer {_mint('incomplete.zip', 100, jti=sid)}"}
    tc = TestClient(app)
    tc.post("/api/packs/upload/init", headers=headers)
    # Only 50 of the expected 100 bytes.
    tc.put(
        f"/api/packs/upload/{sid}/chunk?offset=0",
        content=b"q" * 50,
        headers=headers,
    )
    resp = tc.post(f"/api/packs/upload/{sid}/complete", headers=headers)
    assert resp.status_code == 400
    assert "미완료" in resp.json()["detail"]
def test_chunk_init_filename_collision(tmp_path, monkeypatch):
    """init -> 409 when the target filename already exists in PACK_BASE_DIR."""
    monkeypatch.setattr("app.routes.PACK_BASE_DIR", tmp_path)
    (tmp_path / "existing.zip").write_bytes(b"already here")
    resp = TestClient(app).post(
        "/api/packs/upload/init",
        headers={"Authorization": f"Bearer {_mint('existing.zip', 100)}"},
    )
    assert resp.status_code == 409
def test_chunk_upload_stores_host_path(tmp_path, monkeypatch):
    """On chunked complete, the file_path stored in Supabase is PACK_HOST_DIR-based."""
    from pathlib import Path
    container_base = tmp_path / "container"
    host_base = Path("/volume1/host/packs")
    monkeypatch.setattr("app.routes.PACK_BASE_DIR", container_base)
    monkeypatch.setattr("app.routes.PACK_HOST_DIR", host_base)
    captured = {}
    fake_supabase = MagicMock()
    # Capture the insert payload so the stored file_path can be inspected below.
    def capture_insert(payload):
        captured.update(payload)
        m = MagicMock()
        m.execute.return_value = MagicMock(data=[{"uploaded_at": "2026-05-12T00:00:00+00:00"}])
        return m
    fake_supabase.table.return_value.insert.side_effect = capture_insert
    jti = str(uuid.uuid4())
    token = _mint("hostpath_chunk.zip", 5, jti=jti)
    headers = {"Authorization": f"Bearer {token}"}
    with patch("app.routes._supabase", return_value=fake_supabase):
        c = TestClient(app)
        c.post("/api/packs/upload/init", headers=headers)
        c.put(f"/api/packs/upload/{jti}/chunk?offset=0", content=b"hello", headers=headers)
        r = c.post(f"/api/packs/upload/{jti}/complete", headers=headers)
        assert r.status_code == 200
        # Stored path must be the host path, not the container path.
        assert captured["file_path"] == str(host_base / "hostpath_chunk.zip")
def test_upload_stores_host_path_not_container_path(tmp_path, monkeypatch):
    """The file_path stored in Supabase on upload must be the PACK_HOST_DIR
    (NAS host) absolute path, not PACK_BASE_DIR (container).

    The DSM API works with NAS-host absolute paths, so storing a
    container-internal path (/app/data/packs/...) in Supabase would make DSM
    unable to find the file at sign-link time.
    """
    from pathlib import Path
    container_base = tmp_path / "container"
    host_base = Path("/volume1/docker/webpage/media/packs")
    monkeypatch.setattr("app.routes.PACK_BASE_DIR", container_base)
    monkeypatch.setattr("app.routes.PACK_HOST_DIR", host_base)
    captured_insert = {}
    fake_supabase = MagicMock()
    # Capture the insert payload so the stored file_path can be inspected below.
    def capture_insert(payload):
        captured_insert.update(payload)
        m = MagicMock()
        m.execute.return_value = MagicMock(data=[{"uploaded_at": "2026-05-11T00:00:00+00:00"}])
        return m
    fake_supabase.table.return_value.insert.side_effect = capture_insert
    token = auth.mint_upload_token({
        "tier": "pro",
        "label": "샘플",
        "filename": "host_path_check.zip",
        "size_bytes": 5,
        "jti": str(uuid.uuid4()),
        "expires_at": int(time.time()) + 1800,
    })
    with patch("app.routes._supabase", return_value=fake_supabase):
        test_client = TestClient(app)
        resp = test_client.post(
            "/api/packs/upload",
            files={"file": ("host_path_check.zip", b"hello")},
            headers={"Authorization": f"Bearer {token}"},
        )
        assert resp.status_code == 200
        # The file_path stored in Supabase is the host path.
        expected_host = str(host_base / "host_path_check.zip")
        assert captured_insert["file_path"] == expected_host
        # The response's file_path is the host path too.
        assert resp.json()["file_path"] == expected_host
        # And it differs from the container path (under tmp_path).
        assert str(container_base) not in captured_insert["file_path"]