mirror of https://github.com/Nezreka/SoulSync.git
Closes #584. Quarantined files used to sit in ss_quarantine/ with a thin sidecar — no UI, no recovery, no way to see what got dropped. This adds the management surface the user needs without going to the filesystem. UI: new "Quarantine" button on the downloads page header opens a modal with every quarantined file (filename, expected track/artist, reason, when, size). Three actions per row: - Approve (one-click): restores the file, re-runs the post-process pipeline with ONLY the failing check skipped, lands in the library with full tags + lyrics + scan - Recover (legacy fallback): moves to Staging for thin-sidecar entries that lack the embedded context Approve needs - Delete: permanent removal of file + sidecar Per-check bypass: context['_skip_quarantine_check'] = 'integrity' / 'acoustid' / 'bit_depth'. Skips ONLY the named check — other quality gates stay live. No blanket bypass-all flag. Sidecar expansion: move_to_quarantine now persists the full json-serializable context via serialize_quarantine_context (drops non-JSON-safe values, walks nested dicts/lists/sets, str-coerces unknown objects) plus the trigger name. Existing thin sidecars are detected and routed to Recover instead of Approve. Pure helpers in core/imports/quarantine.py: list_quarantine_entries / delete_quarantine_entry / approve_quarantine_entry / recover_to_staging / serialize_quarantine_context. 27 tests pin every shape: orphan files / orphan sidecars / corrupt sidecars / collision-safe filename restoration / full-context vs thin-sidecar dispatch / json round-trip safety. Four new endpoints in web_server.py — thin glue around the helpers: GET /api/quarantine/list, DELETE /api/quarantine/<id>, POST /api/quarantine/<id>/approve, POST /api/quarantine/<id>/recover. Download modal status differentiates "🛡️ Quarantined" from "❌ Failed" so recoverable files are visible at a glance — checked against the error_message text, no schema change needed. 
Pipeline changes are three minimal per-check conditionals at the existing quarantine sites in core/imports/pipeline.py. Each move_to_quarantine call now passes its trigger name so the sidecar records which check fired. Full suite: 2992 passed.
parent
dbe1a9e451
commit
f4cff78f13
@ -0,0 +1,329 @@
|
||||
"""Quarantine entry management — pure helpers for list/delete/approve/recover.
|
||||
|
||||
Quarantined files live in `<download_path>/ss_quarantine/` as
|
||||
`<timestamp>_<original>.<ext>.quarantined` paired with a JSON sidecar
|
||||
`<timestamp>_<original>.json` written by `core.imports.guards.move_to_quarantine`.
|
||||
|
||||
This module provides the read/write/restore primitives. Web routes are
|
||||
thin glue around these. Pipeline re-run on approval is the caller's
|
||||
job (we hand back `(file_path, context, bypass_check)`).
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import json
|
||||
import os
|
||||
import shutil
|
||||
from pathlib import Path
|
||||
from typing import Any, Dict, List, Optional, Tuple
|
||||
|
||||
from utils.logging_config import get_logger
|
||||
|
||||
logger = get_logger("imports.quarantine")
|
||||
|
||||
|
||||
_QUARANTINE_SUFFIX = ".quarantined"


# Scalar types json.dump accepts as-is. Containers are walked
# recursively; anything else is stringified during sidecar serialization.
_SAFE_SCALARS = (str, int, float, bool, type(None))


def serialize_quarantine_context(context: Any) -> Dict[str, Any]:
    """Return a JSON-safe deep copy of *context*.

    Non-dict input collapses to ``{}``. Dicts are walked recursively;
    lists, tuples and sets become JSON lists; scalars pass through
    unchanged. Any other value is converted with ``str()`` so the
    caller still sees *something* (rather than a silent drop) without
    breaking the ``json.dump`` / ``json.load`` round-trip; values whose
    ``str()`` raises become ``None``.
    """
    return _coerce_dict(context) if isinstance(context, dict) else {}


def _coerce_value(value: Any) -> Any:
    # Already JSON-safe: pass through untouched.
    if isinstance(value, _SAFE_SCALARS):
        return value
    # Containers: recurse. Tuples and sets collapse to JSON lists.
    if isinstance(value, dict):
        return _coerce_dict(value)
    if isinstance(value, (list, tuple, set)):
        return [_coerce_value(item) for item in value]
    # Fallback — preserve the value's shape via str() so the sidecar
    # records something readable without breaking JSON serialization.
    try:
        return str(value)
    except Exception:
        return None


def _coerce_dict(d: Dict[str, Any]) -> Dict[str, Any]:
    """Walk one dict level: stringify keys, coerce every value."""
    result: Dict[str, Any] = {}
    for raw_key, raw_value in d.items():
        if isinstance(raw_key, str):
            key = raw_key
        else:
            # JSON object keys must be strings; unconvertible keys are dropped.
            try:
                key = str(raw_key)
            except Exception:
                continue
        result[key] = _coerce_value(raw_value)
    return result
|
||||
|
||||
|
||||
def _entry_id_from_filename(quarantined_filename: str) -> str:
|
||||
"""Derive a stable entry id from the quarantined filename.
|
||||
|
||||
Strip the `.quarantined` suffix; strip the original file extension;
|
||||
return the bare `<timestamp>_<original>` stem. Sidecar uses the
|
||||
same stem with a `.json` extension, so the id pairs both sides.
|
||||
"""
|
||||
base = quarantined_filename
|
||||
if base.endswith(_QUARANTINE_SUFFIX):
|
||||
base = base[: -len(_QUARANTINE_SUFFIX)]
|
||||
return Path(base).stem
|
||||
|
||||
|
||||
def list_quarantine_entries(quarantine_dir: str) -> List[Dict[str, Any]]:
    """Enumerate quarantined files paired with their sidecars.

    Returns one dict per `.quarantined` file carrying: id, filename,
    original_filename (from sidecar), reason, expected_track,
    expected_artist, timestamp, size_bytes, has_full_context (True
    when the sidecar embeds a `context` dict — the prerequisite for
    one-click Approve), and trigger (which check fired: integrity /
    acoustid / bit_depth / unknown).

    Orphaned `.quarantined` files (no sidecar) still surface so the
    caller can delete them. Orphaned sidecars (no file) are skipped
    silently. Result is sorted newest-first by the timestamp-prefixed id.
    """
    if not os.path.isdir(quarantine_dir):
        return []

    results: List[Dict[str, Any]] = []
    for filename in os.listdir(quarantine_dir):
        if not filename.endswith(_QUARANTINE_SUFFIX):
            continue
        file_path = os.path.join(quarantine_dir, filename)
        if not os.path.isfile(file_path):
            continue

        entry_id = _entry_id_from_filename(filename)

        # Sidecar is best-effort: a missing or corrupt one degrades the
        # entry to placeholder metadata instead of hiding the file.
        sidecar_meta: Dict[str, Any] = {}
        sidecar_file = os.path.join(quarantine_dir, f"{entry_id}.json")
        if os.path.isfile(sidecar_file):
            try:
                with open(sidecar_file, encoding="utf-8") as fh:
                    parsed = json.load(fh)
                if isinstance(parsed, dict):
                    sidecar_meta = parsed
            except Exception as exc:
                logger.debug("sidecar read failed for %s: %s", entry_id, exc)

        try:
            size = os.path.getsize(file_path)
        except OSError:
            size = 0

        results.append(
            {
                "id": entry_id,
                "filename": filename,
                "original_filename": sidecar_meta.get("original_filename", filename),
                "reason": sidecar_meta.get("quarantine_reason", "Unknown reason"),
                "expected_track": sidecar_meta.get("expected_track", ""),
                "expected_artist": sidecar_meta.get("expected_artist", ""),
                "timestamp": sidecar_meta.get("timestamp", ""),
                "size_bytes": size,
                "has_full_context": isinstance(sidecar_meta.get("context"), dict),
                "trigger": sidecar_meta.get("trigger", "unknown"),
            }
        )

    # id begins with the <YYYYMMDD_HHMMSS> prefix, so a lexicographic
    # reverse sort is newest-first.
    results.sort(key=lambda entry: entry["id"], reverse=True)
    return results
|
||||
|
||||
|
||||
def _resolve_entry_paths(quarantine_dir: str, entry_id: str) -> Tuple[Optional[str], Optional[str]]:
    """Locate the `.quarantined` file and JSON sidecar for *entry_id*.

    Returns ``(file_path, sidecar_path)``; either slot is None when the
    corresponding piece is missing.
    """
    if not entry_id or not os.path.isdir(quarantine_dir):
        return None, None

    # The quarantined file carries an unknown original extension, so it
    # must be found by scanning rather than reconstructed from the id.
    quarantined_path: Optional[str] = None
    for candidate in os.listdir(quarantine_dir):
        if candidate.endswith(_QUARANTINE_SUFFIX) and _entry_id_from_filename(candidate) == entry_id:
            quarantined_path = os.path.join(quarantine_dir, candidate)
            break

    sidecar_candidate = os.path.join(quarantine_dir, f"{entry_id}.json")
    sidecar_path = sidecar_candidate if os.path.isfile(sidecar_candidate) else None
    return quarantined_path, sidecar_path
|
||||
|
||||
|
||||
def delete_quarantine_entry(quarantine_dir: str, entry_id: str) -> bool:
    """Permanently remove the quarantined file and sidecar for *entry_id*.

    Returns True when at least one of the pair was removed; False when
    neither existed (the entry was already gone). OS-level removal
    failures are logged and swallowed so a stuck file never blocks the
    sidecar cleanup (or vice versa).
    """
    file_path, sidecar_path = _resolve_entry_paths(quarantine_dir, entry_id)
    anything_removed = False

    if file_path and os.path.isfile(file_path):
        try:
            os.remove(file_path)
            anything_removed = True
        except OSError as exc:
            logger.error("Failed to delete quarantine file %s: %s", file_path, exc)

    if sidecar_path and os.path.isfile(sidecar_path):
        try:
            os.remove(sidecar_path)
            anything_removed = True
        except OSError as exc:
            logger.error("Failed to delete quarantine sidecar %s: %s", sidecar_path, exc)

    return anything_removed
|
||||
|
||||
|
||||
def _restore_filename(quarantined_filename: str, sidecar_original: Optional[str] = None) -> str:
|
||||
"""Resolve the filename to restore.
|
||||
|
||||
Sidecar's `original_filename` wins when provided — it's the
|
||||
canonical record of what the file was named before quarantine.
|
||||
Otherwise parse the `<YYYYMMDD_HHMMSS>_<original>.<ext>.quarantined`
|
||||
convention written by `move_to_quarantine`, dropping the timestamp
|
||||
prefix and `.quarantined` suffix. Final fallback returns the
|
||||
quarantined filename minus the suffix unchanged.
|
||||
"""
|
||||
if sidecar_original:
|
||||
return sidecar_original
|
||||
base = quarantined_filename
|
||||
if base.endswith(_QUARANTINE_SUFFIX):
|
||||
base = base[: -len(_QUARANTINE_SUFFIX)]
|
||||
parts = base.split("_", 2)
|
||||
if len(parts) >= 3 and parts[0].isdigit() and parts[1].isdigit():
|
||||
return parts[2]
|
||||
return base
|
||||
|
||||
|
||||
def approve_quarantine_entry(
    quarantine_dir: str,
    entry_id: str,
    restore_dir: str,
) -> Optional[Tuple[str, Dict[str, Any], str]]:
    """Restore a quarantined file for re-import via the post-process pipeline.

    Loads the sidecar's serialized `context` + `trigger`, moves the file
    out of quarantine into *restore_dir* under its original name
    (collision-safe), then deletes the sidecar.

    Returns ``(restored_file_path, context, trigger)`` so the caller can
    set the matching `_skip_quarantine_check` bypass flag and dispatch
    the pipeline.

    Returns None when the entry or sidecar is missing, the sidecar is
    unreadable, the sidecar lacks an embedded `context` (legacy thin
    sidecar — caller should fall back to `recover_to_staging`), or the
    file move fails.
    """
    file_path, sidecar_path = _resolve_entry_paths(quarantine_dir, entry_id)
    if not (file_path and sidecar_path):
        logger.warning("approve: entry %s missing file or sidecar", entry_id)
        return None

    try:
        with open(sidecar_path, encoding="utf-8") as handle:
            sidecar = json.load(handle)
    except Exception as exc:
        logger.error("approve: sidecar read failed for %s: %s", entry_id, exc)
        return None

    context = sidecar.get("context")
    if not isinstance(context, dict):
        # Thin legacy sidecar: without the embedded pipeline context,
        # one-click Approve is impossible.
        logger.info("approve: entry %s has thin sidecar (no context) — caller should recover-to-staging", entry_id)
        return None

    trigger = str(sidecar.get("trigger", "unknown"))

    # NOTE(review): original_filename is trusted as a bare filename here
    # (written by move_to_quarantine, not user input) — confirm it can
    # never contain path separators.
    restore_name = sidecar.get("original_filename") or _restore_filename(os.path.basename(file_path))
    os.makedirs(restore_dir, exist_ok=True)
    destination = _ensure_unique_path(os.path.join(restore_dir, restore_name))

    try:
        shutil.move(file_path, destination)
    except OSError as exc:
        logger.error("approve: failed to restore %s -> %s: %s", file_path, destination, exc)
        return None

    # Best-effort cleanup — the file itself is already restored.
    try:
        os.remove(sidecar_path)
    except OSError as exc:
        logger.warning("approve: failed to remove sidecar %s: %s", sidecar_path, exc)

    return destination, context, trigger
|
||||
|
||||
|
||||
def recover_to_staging(
    quarantine_dir: str,
    staging_dir: str,
    entry_id: str,
) -> Optional[str]:
    """Move a quarantined file into Staging for manual import.

    Fallback path for legacy thin sidecars (no embedded `context`)
    where one-click Approve is impossible: the user finishes via the
    existing Import flow instead. The restored name comes from the
    sidecar's `original_filename` when readable, otherwise from
    stripping the timestamp prefix + `.quarantined` suffix. The sidecar
    is removed afterwards. Returns the staged path, or None when the
    entry is missing or the move fails.
    """
    file_path, sidecar_path = _resolve_entry_paths(quarantine_dir, entry_id)
    if not file_path:
        return None

    preferred_name = None
    if sidecar_path:
        try:
            with open(sidecar_path, encoding="utf-8") as handle:
                preferred_name = json.load(handle).get("original_filename")
        except Exception as exc:
            # Corrupt/unreadable sidecar — fall back to filename parsing.
            logger.debug("recover: sidecar read failed for %s: %s", entry_id, exc)

    staged_name = _restore_filename(os.path.basename(file_path), preferred_name)
    os.makedirs(staging_dir, exist_ok=True)
    destination = _ensure_unique_path(os.path.join(staging_dir, staged_name))

    try:
        shutil.move(file_path, destination)
    except OSError as exc:
        logger.error("recover: failed to move %s -> %s: %s", file_path, destination, exc)
        return None

    if sidecar_path and os.path.isfile(sidecar_path):
        try:
            os.remove(sidecar_path)
        except OSError as exc:
            logger.warning("recover: failed to remove sidecar %s: %s", sidecar_path, exc)

    return destination
|
||||
|
||||
|
||||
def _ensure_unique_path(target: str) -> str:
|
||||
"""Append `_(2)`, `_(3)`, ... before the extension when target exists."""
|
||||
if not os.path.exists(target):
|
||||
return target
|
||||
base, ext = os.path.splitext(target)
|
||||
counter = 2
|
||||
while True:
|
||||
candidate = f"{base}_({counter}){ext}"
|
||||
if not os.path.exists(candidate):
|
||||
return candidate
|
||||
counter += 1
|
||||
@ -0,0 +1,273 @@
|
||||
import json
|
||||
import os
|
||||
|
||||
from core.imports.quarantine import (
|
||||
approve_quarantine_entry,
|
||||
delete_quarantine_entry,
|
||||
list_quarantine_entries,
|
||||
recover_to_staging,
|
||||
serialize_quarantine_context,
|
||||
)
|
||||
|
||||
|
||||
# ──────────────────────────────────────────────────────────────────────
# serialize_quarantine_context — JSON-safe coercion
# ──────────────────────────────────────────────────────────────────────

def test_serialize_passes_scalar_dict_unchanged():
    # str / int / bool / None scalars must survive untouched.
    ctx = {"title": "DNA.", "track_number": 2, "active": True, "missing": None, "duration_ms": 185000}
    out = serialize_quarantine_context(ctx)
    assert out == ctx


def test_serialize_walks_nested_dicts():
    # Nested dicts and lists-of-dicts round-trip structurally intact.
    ctx = {"track_info": {"name": "DNA.", "artists": [{"name": "Kendrick"}, {"name": "Rihanna"}]}}
    out = serialize_quarantine_context(ctx)
    assert out == ctx


def test_serialize_coerces_set_to_list():
    # Sets aren't JSON — expect a list (order unspecified, hence sorted).
    ctx = {"sources": {"spotify", "deezer"}}
    out = serialize_quarantine_context(ctx)
    assert sorted(out["sources"]) == ["deezer", "spotify"]


def test_serialize_coerces_tuple_to_list():
    # Tuples aren't JSON either — they collapse to lists.
    ctx = {"pair": (1, 2, 3)}
    out = serialize_quarantine_context(ctx)
    assert out == {"pair": [1, 2, 3]}


def test_serialize_stringifies_unknown_objects():
    # Unknown objects fall back to str() rather than being dropped.
    class Custom:
        def __str__(self):
            return "<custom obj>"
    out = serialize_quarantine_context({"obj": Custom()})
    assert out["obj"] == "<custom obj>"


def test_serialize_non_dict_returns_empty_dict():
    # Only dict contexts are meaningful; everything else collapses to {}.
    assert serialize_quarantine_context(None) == {}
    assert serialize_quarantine_context("string") == {}
    assert serialize_quarantine_context([1, 2, 3]) == {}


def test_serialize_round_trips_through_json():
    # The whole point of the serializer: json.dumps must never raise.
    ctx = {
        "track_info": {"name": "X", "artists": [{"name": "A"}, {"name": "B"}]},
        "spotify_artist": {"name": "A", "id": "abc"},
        "duration_ms": 180000,
        "sources": {"spotify"},
    }
    serialized = serialize_quarantine_context(ctx)
    json.dumps(serialized)  # must not raise
|
||||
|
||||
|
||||
# ──────────────────────────────────────────────────────────────────────
# list_quarantine_entries
# ──────────────────────────────────────────────────────────────────────

def _write_entry(quarantine_dir, entry_id, original_name, *, with_context=False, trigger="integrity", reason="boom", file_bytes=b"X" * 100):
    # Test fixture: writes an `<entry_id>_<original>.quarantined` payload
    # plus its matching JSON sidecar, mirroring what move_to_quarantine
    # produces on disk.
    qfile = quarantine_dir / f"{entry_id}_{original_name}.quarantined"
    qfile.write_bytes(file_bytes)
    sidecar = {
        "original_filename": original_name,
        "quarantine_reason": reason,
        "expected_track": "Track",
        "expected_artist": "Artist",
        "timestamp": "2026-05-14T12:00:00",
        "trigger": trigger,
    }
    if with_context:
        # Full-context sidecar — the shape one-click Approve requires.
        sidecar["context"] = {"track_info": {"name": "Track"}, "context_key": entry_id}
    # Sidecar shares the quarantined file's stem, with a .json extension.
    sidecar_path = quarantine_dir / f"{entry_id}_{os.path.splitext(original_name)[0]}.json"
    sidecar_path.write_text(json.dumps(sidecar))
    return qfile, sidecar_path


def test_list_returns_empty_for_missing_dir(tmp_path):
    # A nonexistent quarantine dir is not an error — just no entries.
    assert list_quarantine_entries(str(tmp_path / "nope")) == []


def test_list_returns_empty_for_empty_dir(tmp_path):
    assert list_quarantine_entries(str(tmp_path)) == []


def test_list_returns_entry_with_sidecar_fields(tmp_path):
    # All sidecar fields surface on the listed entry.
    _write_entry(tmp_path, "20260514_120000", "song.flac", reason="Duration mismatch")
    entries = list_quarantine_entries(str(tmp_path))
    assert len(entries) == 1
    e = entries[0]
    assert e["original_filename"] == "song.flac"
    assert e["reason"] == "Duration mismatch"
    assert e["expected_track"] == "Track"
    assert e["expected_artist"] == "Artist"
    assert e["has_full_context"] is False
    assert e["trigger"] == "integrity"
    assert e["size_bytes"] == 100


def test_list_flags_full_context_entries(tmp_path):
    # `context` present in sidecar -> entry is Approve-eligible.
    _write_entry(tmp_path, "20260514_120000", "song.flac", with_context=True)
    entries = list_quarantine_entries(str(tmp_path))
    assert entries[0]["has_full_context"] is True


def test_list_handles_orphan_quarantined_file_without_sidecar(tmp_path):
    # Orphaned .quarantined file (no sidecar) still surfaces with defaults.
    qfile = tmp_path / "20260514_120000_orphan.flac.quarantined"
    qfile.write_bytes(b"X")
    entries = list_quarantine_entries(str(tmp_path))
    assert len(entries) == 1
    assert entries[0]["reason"] == "Unknown reason"
    assert entries[0]["has_full_context"] is False


def test_list_skips_orphan_sidecars_without_file(tmp_path):
    # Sidecar without its .quarantined file is ignored entirely.
    sidecar = tmp_path / "20260514_120000_only.json"
    sidecar.write_text(json.dumps({"original_filename": "only.flac", "quarantine_reason": "x"}))
    assert list_quarantine_entries(str(tmp_path)) == []


def test_list_sorts_newest_first(tmp_path):
    # Timestamp-prefixed ids sort lexicographically; reverse = newest first.
    _write_entry(tmp_path, "20260101_120000", "old.flac")
    _write_entry(tmp_path, "20260514_120000", "new.flac")
    entries = list_quarantine_entries(str(tmp_path))
    assert entries[0]["original_filename"] == "new.flac"
    assert entries[1]["original_filename"] == "old.flac"


def test_list_swallows_corrupt_sidecar_gracefully(tmp_path):
    # Invalid JSON in the sidecar degrades to placeholder metadata.
    qfile = tmp_path / "20260514_120000_song.flac.quarantined"
    qfile.write_bytes(b"X")
    sidecar = tmp_path / "20260514_120000_song.json"
    sidecar.write_text("{ this is not valid json")
    entries = list_quarantine_entries(str(tmp_path))
    assert len(entries) == 1
    assert entries[0]["reason"] == "Unknown reason"
|
||||
|
||||
|
||||
# ──────────────────────────────────────────────────────────────────────
# delete_quarantine_entry
# ──────────────────────────────────────────────────────────────────────

def test_delete_removes_both_file_and_sidecar(tmp_path):
    # Happy path: both halves of the entry are removed in one call.
    _write_entry(tmp_path, "20260514_120000", "song.flac")
    assert delete_quarantine_entry(str(tmp_path), "20260514_120000_song") is True
    assert not (tmp_path / "20260514_120000_song.flac.quarantined").exists()
    assert not (tmp_path / "20260514_120000_song.json").exists()


def test_delete_returns_false_when_entry_missing(tmp_path):
    # Nothing to delete -> False, not an exception.
    assert delete_quarantine_entry(str(tmp_path), "nonexistent") is False


def test_delete_handles_orphan_file_without_sidecar(tmp_path):
    # Deleting just the orphaned file still counts as a removal.
    qfile = tmp_path / "20260514_120000_orphan.flac.quarantined"
    qfile.write_bytes(b"X")
    assert delete_quarantine_entry(str(tmp_path), "20260514_120000_orphan") is True
    assert not qfile.exists()
|
||||
|
||||
|
||||
# ──────────────────────────────────────────────────────────────────────
# approve_quarantine_entry — full-context path
# ──────────────────────────────────────────────────────────────────────

def test_approve_restores_file_and_returns_context_and_trigger(tmp_path):
    # Full-context entry: file is restored under its original name and
    # the (context, trigger) pair comes back for the pipeline re-run.
    quarantine = tmp_path / "ss_quarantine"
    quarantine.mkdir()
    restore = tmp_path / "restore"

    _write_entry(quarantine, "20260514_120000", "song.flac", with_context=True, trigger="integrity")

    result = approve_quarantine_entry(str(quarantine), "20260514_120000_song", str(restore))
    assert result is not None
    restored_path, context, trigger = result
    assert os.path.basename(restored_path) == "song.flac"
    assert os.path.isfile(restored_path)
    assert context["track_info"]["name"] == "Track"
    assert trigger == "integrity"
    # Sidecar removed after approve
    assert not (quarantine / "20260514_120000_song.json").exists()


def test_approve_returns_none_for_thin_sidecar_without_context(tmp_path):
    # Legacy thin sidecar (no `context`) must refuse one-click Approve.
    _write_entry(tmp_path, "20260514_120000", "song.flac", with_context=False)
    result = approve_quarantine_entry(str(tmp_path), "20260514_120000_song", str(tmp_path / "restore"))
    assert result is None


def test_approve_returns_none_for_missing_entry(tmp_path):
    assert approve_quarantine_entry(str(tmp_path), "nope", str(tmp_path)) is None


def test_approve_avoids_filename_collision(tmp_path):
    # A pre-existing file at the restore target must not be clobbered —
    # the restore lands as `song_(2).flac` instead.
    quarantine = tmp_path / "q"
    quarantine.mkdir()
    restore = tmp_path / "r"
    restore.mkdir()
    (restore / "song.flac").write_bytes(b"existing")
    _write_entry(quarantine, "20260514_120000", "song.flac", with_context=True)
    result = approve_quarantine_entry(str(quarantine), "20260514_120000_song", str(restore))
    assert result is not None
    restored_path = result[0]
    assert os.path.basename(restored_path) == "song_(2).flac"
    assert (restore / "song.flac").read_bytes() == b"existing"
|
||||
|
||||
|
||||
# ──────────────────────────────────────────────────────────────────────
# recover_to_staging — fallback for thin sidecars
# ──────────────────────────────────────────────────────────────────────

def test_recover_strips_prefix_and_suffix(tmp_path):
    # Timestamp prefix and .quarantined suffix are both removed on move.
    quarantine = tmp_path / "q"
    quarantine.mkdir()
    staging = tmp_path / "s"

    qfile, _ = _write_entry(quarantine, "20260514_120000", "song.flac")

    target = recover_to_staging(str(quarantine), str(staging), "20260514_120000_song")
    assert target is not None
    assert os.path.basename(target) == "song.flac"
    assert os.path.isfile(target)
    assert not qfile.exists()


def test_recover_uses_sidecar_original_filename_when_available(tmp_path):
    # Sidecar's original_filename wins over filename-convention parsing.
    quarantine = tmp_path / "q"
    quarantine.mkdir()
    staging = tmp_path / "s"
    qfile = quarantine / "20260514_120000_munged_name.flac.quarantined"
    qfile.write_bytes(b"X")
    sidecar = quarantine / "20260514_120000_munged_name.json"
    sidecar.write_text(json.dumps({"original_filename": "Pretty Track Name.flac"}))

    target = recover_to_staging(str(quarantine), str(staging), "20260514_120000_munged_name")
    assert target is not None
    assert os.path.basename(target) == "Pretty Track Name.flac"


def test_recover_returns_none_for_missing_entry(tmp_path):
    assert recover_to_staging(str(tmp_path / "q"), str(tmp_path / "s"), "nope") is None


def test_recover_avoids_filename_collision(tmp_path):
    # Existing staged file with the same name must not be overwritten.
    quarantine = tmp_path / "q"
    quarantine.mkdir()
    staging = tmp_path / "s"
    staging.mkdir()
    (staging / "song.flac").write_bytes(b"existing")
    _write_entry(quarantine, "20260514_120000", "song.flac")

    target = recover_to_staging(str(quarantine), str(staging), "20260514_120000_song")
    assert target is not None
    assert os.path.basename(target) == "song_(2).flac"


def test_recover_removes_sidecar_after_move(tmp_path):
    # Sidecar cleanup happens even on the recover (non-approve) path.
    quarantine = tmp_path / "q"
    quarantine.mkdir()
    staging = tmp_path / "s"
    _, sidecar = _write_entry(quarantine, "20260514_120000", "song.flac")

    recover_to_staging(str(quarantine), str(staging), "20260514_120000_song")
    assert not sidecar.exists()
|
||||
Loading…
Reference in new issue