Surface silent exceptions across remaining modules — ~70 sites

Final sweep. Covers:
- Downloads: candidates / lifecycle / master / monitor / wishlist_failed
- Metadata: source / registry / cache / common / artwork (+ plex_client)
- Imports: pipeline / resolution / file_ops / paths / guards
- Library: path_resolver / retag / duplicate_cleaner
- Stats / playlists / wishlist / discovery / automation / enrichment
- Misc: hydrabase_client, soulsync_client, tag_writer, debug_info,
  api_call_tracker, album_consistency, beatport_unified_scraper,
  reorganize_runner, seasonal_discovery, lidarr_download_client,
  services/sync_service.py, automation_engine, automation/progress

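The change at each of these sites is the same minimal pattern. The snippet
below is a generic sketch, not code from any one of the modules listed above
(`emit_event` and `payload` are placeholders):

    import logging

    logger = logging.getLogger(__name__)

    def emit_event(payload):  # stands in for any of the calls touched here
        raise RuntimeError("socket gone")

    payload = {"id": 1}

    # before: the failure disappears without a trace
    try:
        emit_event(payload)
    except Exception:
        pass

    # after: the failure is surfaced at debug level, control flow unchanged
    try:
        emit_event(payload)
    except Exception as e:
        logger.debug("emit failed: %s", e)
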
The two `_e` renames in imports/file_ops.py avoid reusing `e`, which the
enclosing handler already binds (sketched below).
A few finally-block sites in metadata/album_mbid_cache.py,
library/track_identity.py, listening_stats_worker.py, and
watchlist/auto_scan.py are left silent on purpose, for the same reason
other cleanup-path sites were skipped in this sweep: logger calls during
cleanup can themselves raise.
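
A minimal sketch of those two edge cases, assuming illustrative helper
names (`do_move`, `cleanup_partial`, `handle`) rather than the actual
file_ops.py or cache code:

    import logging

    logger = logging.getLogger(__name__)

    def move_with_cleanup(src, dst, do_move, cleanup_partial):
        # Why the `_e` rename: the name bound by `except ... as` is deleted
        # when its block exits, so an inner handler reusing `e` would clobber
        # the outer binding before the final log call below runs.
        try:
            do_move(src, dst)
        except Exception as e:
            try:
                cleanup_partial(dst)
            except Exception as _e:
                logger.debug("cleanup after failed move: %s", _e)
            logger.error("move failed: %s", e)

    def read_then_close(handle):
        # Why the finally-block sites stay silent: this branch runs on every
        # exit path, including teardown, where logging itself can raise.
        try:
            return handle.read()
        finally:
            try:
                handle.close()
            except Exception:
                pass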

Refs #369
pull/516/head
Broque Thomas 1 week ago
parent e95452b465
commit aa54bed818

@ -3186,8 +3186,8 @@ class BeatportUnifiedScraper:
except Exception:
continue
except Exception:
pass
except Exception as e:
logger.debug("parse release tracks failed: %s", e)
return tracks

@ -195,8 +195,8 @@ def _find_best_release(album_name, artist_name, track_count, mb_service):
sr_id = sr.get('id', '')
if sr_id and sr_id not in candidate_mbids:
candidate_mbids.append(sr_id)
except Exception:
pass
except Exception as e:
logger.debug("search_release fallback failed: %s", e)
if not candidate_mbids:
logger.info(f"No MB release found for '{album_name}' by '{artist_name}'")

@ -302,8 +302,8 @@ class ApiCallTracker:
try:
if os.path.exists(_PERSIST_PATH + '.tmp'):
os.remove(_PERSIST_PATH + '.tmp')
except Exception:
pass
except Exception as e:
logger.debug("remove stale tmp file failed: %s", e)
def _load(self):
"""Restore 24h minute history from disk. Called on init."""

@ -77,8 +77,8 @@ def update_progress(
if socketio_emit is not None:
try:
socketio_emit('automation:progress', {str(automation_id): dict(state)})
except Exception:
pass
except Exception as e:
logger.debug("socketio progress emit: %s", e)
def get_running_progress() -> dict[str, dict]:
@ -121,8 +121,8 @@ def record_history(
t0 = datetime.fromisoformat(started_at)
t1 = datetime.fromisoformat(finished_at)
duration = (t1 - t0).total_seconds()
except Exception:
pass
except Exception as e:
logger.debug("duration parse: %s", e)
r_status = result.get('status', 'completed') if result else 'completed'
if r_status == 'error':

@ -531,8 +531,8 @@ class AutomationEngine:
if self._history_record_fn:
try:
self._history_record_fn(automation_id, result)
except Exception:
pass
except Exception as e:
logger.debug("history record failed: %s", e)
# --- Schedule Execution (timer-based) ---
@ -673,8 +673,8 @@ class AutomationEngine:
delay = self._calc_delay_seconds(trigger_config)
if delay:
next_run_str = _utc_after(delay)
except Exception:
pass
except Exception as e:
logger.debug("next run calc failed: %s", e)
last_result = json.dumps(result) if result else None
self.db.update_automation_run(automation_id, next_run=next_run_str, error=error, last_result=last_result)
@ -682,8 +682,8 @@ class AutomationEngine:
if self._history_record_fn:
try:
self._history_record_fn(automation_id, result)
except Exception:
pass
except Exception as e:
logger.debug("history record failed: %s", e)
if self._running:
self.schedule_automation(automation_id)

@ -260,8 +260,8 @@ def get_debug_info():
for _pid, st in list(tidal_discovery_states.items()):
if st.get('phase') == 'syncing':
active_syncs += 1
except Exception:
pass
except Exception as e:
logger.debug("count active syncs failed: %s", e)
info['active_downloads'] = active_downloads
info['active_syncs'] = active_syncs

@ -648,8 +648,8 @@ def run_quality_scanner(scope='watchlist', profile_id=1, deps: QualityScannerDep
'low_quality': str(deps.quality_scanner_state.get('low_quality', 0)),
'total_scanned': str(deps.quality_scanner_state.get('processed', 0)),
})
except Exception:
pass
except Exception as e:
logger.debug("emit quality_scan_completed failed: %s", e)
except Exception as e:
logger.error(f"[Quality Scanner] Critical error: {e}")

@ -97,8 +97,8 @@ def attempt_download_with_candidates(task_id, candidates, track, batch_id=None,
if _bl_db.is_blacklisted(candidate.username, candidate.filename):
logger.info(f"[Modal Worker] Skipping blacklisted source: {source_key}")
continue
except Exception:
pass
except Exception as e:
logger.debug("blacklist check failed: %s", e)
# CRITICAL: Add source to used_sources IMMEDIATELY to prevent race conditions
# This must happen BEFORE starting download to prevent multiple retries from picking same source

@ -233,8 +233,8 @@ def on_download_completed(batch_id: str, task_id: str, success: bool, deps: Life
'title': track_info.get('track_name', ''),
'reason': track_info.get('failure_reason', 'Unknown'),
})
except Exception:
pass
except Exception as e:
logger.debug("download_failed emit failed: %s", e)
# WISHLIST REMOVAL: Handle successful downloads for wishlist removal
if success and task_id in download_tasks:
@ -364,8 +364,8 @@ def on_download_completed(batch_id: str, task_id: str, success: bool, deps: Life
'completed_tracks': str(successful_downloads),
'failed_tracks': str(failed_count),
})
except Exception:
pass
except Exception as e:
logger.debug("batch_complete emit failed: %s", e)
# Update YouTube playlist phase to 'download_complete' if this is a YouTube playlist
playlist_id = batch.get('playlist_id')
@ -569,8 +569,8 @@ def check_batch_completion_v2(batch_id: str, deps: LifecycleDeps) -> Optional[bo
'completed_tracks': str(successful_downloads),
'failed_tracks': str(failed_count),
})
except Exception:
pass
except Exception as e:
logger.debug("batch_complete emit failed: %s", e)
else:
logger.warning(f"[Completion Check V2] Batch {batch_id} already marked complete - skipping duplicate processing")
return True # Already complete

@ -296,8 +296,8 @@ def run_full_missing_tracks_process(batch_id, playlist_id, tracks_json, deps: Ma
})
if track_results:
db_sh.update_sync_history_track_results(batch_id, json.dumps(track_results))
except Exception:
pass
except Exception as e:
logger.debug("update sync_history track results failed: %s", e)
is_auto_batch = False
with tasks_lock:

@ -280,8 +280,8 @@ class WebUIDownloadMonitor:
all_downloads = run_async(
download_orchestrator.engine.get_all_downloads(exclude=('soulseek',))
)
except Exception:
pass
except Exception as e:
logger.debug("get_all_downloads failed: %s", e)
for download in all_downloads:
key = _make_context_key(download.username, download.filename)
# Convert DownloadStatus to transfer dict format for monitor compatibility

@ -146,8 +146,8 @@ def _process_failed_tracks_to_wishlist_exact(batch_id):
'title': track_name,
'reason': failed_track_info.get('failure_reason', ''),
})
except Exception:
pass
except Exception as e:
logger.debug("emit wishlist_item_added failed: %s", e)
else:
logger.error(f"[Wishlist Processing] Failed to add {track_name} to wishlist")

@ -67,8 +67,8 @@ def _drop_auto_pause_marker(service: EnrichmentService) -> None:
return
try:
_auto_paused_discard(service.auto_pause_token)
except Exception:
pass
except Exception as e:
logger.debug("auto-pause marker discard: %s", e)
def _add_yield_override(service: EnrichmentService) -> None:
@ -76,8 +76,8 @@ def _add_yield_override(service: EnrichmentService) -> None:
return
try:
_yield_override_add(service.auto_pause_token)
except Exception:
pass
except Exception as e:
logger.debug("yield override add: %s", e)
def create_blueprint() -> Blueprint:

@ -525,8 +525,8 @@ class HydrabaseClient:
for album in albums:
if album.image_url:
return album.image_url
except Exception:
pass
except Exception as e:
logger.debug("get artist image from albums failed: %s", e)
return None
def get_artist_albums(self, artist_id: str, album_type: str = 'album,single', limit: int = 50) -> List[Album]:

@ -362,8 +362,8 @@ def downsample_hires_flac(final_path, context):
if os.path.exists(temp_path):
try:
os.remove(temp_path)
except Exception:
pass
except Exception as _e:
logger.debug("cleanup downsample temp: %s", _e)
return None
@ -468,8 +468,8 @@ def create_lossy_copy(final_path):
if os.path.exists(out_path):
try:
os.remove(out_path)
except Exception:
pass
except Exception as _e:
logger.debug("cleanup lossy copy artifact: %s", _e)
return None
except subprocess.TimeoutExpired:
logger.warning(f"[Lossy Copy] Conversion timed out for: {os.path.basename(final_path)}")

@ -82,8 +82,8 @@ def move_to_quarantine(file_path: str, context: dict, reason: str, automation_en
"reason": reason or "Unknown",
},
)
except Exception:
pass
except Exception as e:
logger.debug("emit download_quarantined failed: %s", e)
return str(quarantine_path)

@ -188,8 +188,8 @@ def _replace_template_variables(template: str, context: dict) -> str:
resolved = resolved_client.resolve_primary_artist(itunes_artist_id)
if resolved and resolved != album_artist_value:
album_artist_value = resolved
except Exception:
pass
except Exception as e:
logger.debug("resolve primary artist failed: %s", e)
# $cdnum — smart CD label for multi-disc filenames. Produces "CD01" /
# "CD02" etc. when the album has 2+ discs, empty string otherwise.

@ -434,8 +434,8 @@ def post_process_matched_download(context_key, context, file_path, runtime, meta
logger.error(f"Quarantine failed ({quarantine_error}), deleting file: {file_path}")
try:
os.remove(file_path)
except Exception:
pass
except Exception as e:
logger.debug("delete quarantine fallback: %s", e)
context['_bitdepth_rejected'] = True
with matched_context_lock:
@ -562,8 +562,8 @@ def post_process_matched_download(context_key, context, file_path, runtime, meta
logger.error(f"Quarantine failed ({quarantine_error}), deleting file: {file_path}")
try:
os.remove(file_path)
except Exception:
pass
except Exception as e:
logger.debug("delete quarantine fallback: %s", e)
context['_bitdepth_rejected'] = True
with matched_context_lock:

@ -407,8 +407,8 @@ def get_single_track_import_context(
'genres',
default=[],
) or []
except Exception:
pass
except Exception as e:
logger.debug("override artist genres: %s", e)
return payload
except Exception as exc:
logger.debug("Override track lookup failed on %s for %s: %s", chosen_source, override_id, exc)
@ -461,8 +461,8 @@ def get_single_track_import_context(
'genres',
default=[],
) or []
except Exception:
pass
except Exception as e:
logger.debug("artist genres lookup: %s", e)
return payload
return _build_single_import_fallback_context(title, artist, source_priority)

@ -209,8 +209,8 @@ def _run_duplicate_cleaner():
'duplicates_found': str(duplicates_found),
'space_freed': f"{space_mb:.1f} MB",
})
except Exception:
pass
except Exception as e:
logger.debug("emit duplicate_scan_completed failed: %s", e)
except Exception as e:
logger.error(f"[Duplicate Cleaner] Critical error: {e}")

@ -83,8 +83,8 @@ def _collect_base_dirs(
candidates.append(_docker_resolve_path(transfer_cfg))
if download_cfg:
candidates.append(_docker_resolve_path(download_cfg))
except Exception:
pass
except Exception as e:
logger.debug("soulseek paths read failed: %s", e)
# Plex-reported library locations (handles "Plex scanned at /music but
# SoulSync mounts at /library" cases).
@ -96,8 +96,8 @@ def _collect_base_dirs(
for loc in getattr(music_library, "locations", []) or []:
if loc:
candidates.append(loc)
except Exception:
pass
except Exception as e:
logger.debug("plex locations read failed: %s", e)
# User-configured library music paths (Settings → Library → Music Paths).
if config_manager is not None:
@ -107,8 +107,8 @@ def _collect_base_dirs(
for p in music_paths:
if isinstance(p, str) and p.strip():
candidates.append(_docker_resolve_path(p.strip()))
except Exception:
pass
except Exception as e:
logger.debug("music paths read failed: %s", e)
# De-duplicate while preserving order, drop empties / non-existent dirs.
seen: set[str] = set()

@ -265,8 +265,8 @@ def execute_retag(group_id, album_id, deps: RetagDeps):
try:
os.remove(old_cover)
logger.warning("[Retag] Removed orphaned cover.jpg from old directory")
except Exception:
pass
except Exception as e:
logger.debug("remove orphaned cover failed: %s", e)
# Cleanup old empty directories
transfer_dir = deps.docker_resolve_path(deps.config_manager.get('soulseek.transfer_path', './Transfer'))

@ -490,8 +490,8 @@ class LidarrDownloadClient(DownloadSourcePlugin):
try:
self._api_delete(f'album/{lidarr_album_id}', params={'deleteFiles': 'false'})
logger.debug(f"Cleaned up album {lidarr_album_id} from Lidarr")
except Exception:
pass
except Exception as e:
logger.debug("Lidarr album cleanup failed: %s", e)
except Exception as e:
logger.error(f"Lidarr download thread failed: {e}")

@ -320,8 +320,8 @@ def download_cover_art(album_info: dict, target_dir: str, context: dict = None):
from core.spotify_client import _upgrade_spotify_image_url
art_url = _upgrade_spotify_image_url(art_url)
except Exception:
pass
except Exception as e:
logger.debug("upgrade spotify image url failed: %s", e)
elif art_url and "mzstatic.com" in art_url:
art_url = re.sub(r"\d+x\d+bb", "3000x3000bb", art_url)
if not art_url:

@ -33,8 +33,8 @@ def get_metadata_cache():
try:
import threading
threading.Thread(target=_cache_instance.backfill_deezer_album_genres, daemon=True).start()
except Exception:
pass
except Exception as e:
logger.debug("start deezer genres backfill failed: %s", e)
return _cache_instance

@ -171,8 +171,8 @@ def get_image_dimensions(data: bytes):
return w, h
length = struct.unpack(">H", data[i + 2 : i + 4])[0]
i += 2 + length
except Exception:
pass
except Exception as e:
logger.debug("parse JPEG dimensions failed: %s", e)
return None, None

@ -278,8 +278,8 @@ def get_hydrabase_client(allow_fallback: bool = True, require_enabled: bool = Tr
if client and client.is_connected():
if not require_enabled or bool(_dev_mode_enabled_provider()):
return client
except Exception:
pass
except Exception as e:
logger.debug("hydrabase client lookup: %s", e)
if allow_fallback:
return get_itunes_client()
@ -394,8 +394,8 @@ def get_client_for_source(
client = get_spotify_client(client_factory=spotify_client_factory)
if client and client.is_spotify_authenticated():
return client
except Exception:
pass
except Exception as e:
logger.debug("spotify client get_for_source: %s", e)
return None
if source == "deezer":

@ -296,8 +296,8 @@ def _process_musicbrainz_source(pp: dict, metadata: dict, cfg, runtime, track_ti
try:
from core.metadata import album_mbid_cache as _persisted_cache
_persisted_cache.record(normalized_album_key, artist_key, release_mbid)
except Exception:
pass
except Exception as e:
logger.debug("MBID cache persist failed: %s", e)
pp["release_mbid"] = release_mbid or ""
if pp["release_mbid"]:
pp["id_tags"]["MUSICBRAINZ_RELEASE_ID"] = pp["release_mbid"]
@ -959,8 +959,8 @@ def extract_source_metadata(context: dict, artist: dict, album_info: dict) -> di
resolved = itunes_client.resolve_primary_artist(artist_id)
if resolved and resolved != raw_album_artist:
raw_album_artist = resolved
except Exception:
pass
except Exception as e:
logger.debug("itunes primary artist resolve failed: %s", e)
metadata["album_artist"] = raw_album_artist
if album_info.get("is_album"):
@ -1136,8 +1136,8 @@ def embed_source_ids(audio_file, metadata: dict, context: dict = None, runtime=N
)
if isrc_value:
context["_isrc"] = str(isrc_value)
except Exception:
pass
except Exception as e:
logger.debug("context isrc copy failed: %s", e)
except Exception as exc:
logger.error("Error embedding source IDs (non-fatal): %s", exc)

@ -176,8 +176,8 @@ def playlist_explorer_build_tree(deps: PlaylistExplorerDeps):
artist_image = images[0].get('url') if images else None
elif hasattr(artist_info, 'image_url'):
artist_image = artist_info.image_url
except Exception:
pass
except Exception as e:
logger.debug("artist image resolve: %s", e)
else:
# No pre-resolved ID — search by name
try:
@ -256,8 +256,8 @@ def playlist_explorer_build_tree(deps: PlaylistExplorerDeps):
if cache and releases:
try:
cache.store_entity(source_name, 'artist_discography', cache_key, result)
except Exception:
pass
except Exception as e:
logger.debug("cache discography write: %s", e)
return result

@ -405,8 +405,8 @@ class PlexClient(MediaServerClient):
new_count = getattr(a, 'leafCount', 0) or 0
if new_count > existing_count:
by_name[name_key] = a
except Exception:
pass
except Exception as e:
logger.debug("artist leafCount compare failed: %s", e)
return list(by_name.values())
def _dedupe_albums(self, albums: List[PlexAlbum]) -> List[PlexAlbum]:
@ -424,8 +424,8 @@ class PlexClient(MediaServerClient):
artist = ''
try:
artist = (getattr(alb, 'parentTitle', '') or '').strip().lower()
except Exception:
pass
except Exception as e:
logger.debug("album parentTitle read failed: %s", e)
key = (artist, title)
existing = by_key.get(key)
if existing is None:
@ -436,8 +436,8 @@ class PlexClient(MediaServerClient):
new_count = getattr(alb, 'leafCount', 0) or 0
if new_count > existing_count:
by_key[key] = alb
except Exception:
pass
except Exception as e:
logger.debug("album leafCount compare failed: %s", e)
return list(by_key.values())
def get_all_playlists(self) -> List[PlaylistInfo]:

@ -95,8 +95,8 @@ def build_runner(
def _cleanup_empty(src_dir):
try:
cleanup_empty_directories_fn(transfer_dir, os.path.join(src_dir, '_'))
except Exception:
pass
except Exception as e:
logger.debug("cleanup empty dirs failed: %s", e)
def _on_progress(updates):
try:

@ -201,8 +201,8 @@ class SeasonalDiscoveryService:
val = row[0] if isinstance(row, tuple) else row['value']
if val in ('northern', 'southern'):
return val
except Exception:
pass
except Exception as e:
logger.debug("read hemisphere metadata failed: %s", e)
return 'northern'
def get_current_season(self) -> Optional[str]:

@ -253,8 +253,8 @@ class SoulSyncClient(MediaServerClient):
if self._progress_callback:
try:
self._progress_callback(msg)
except Exception:
pass
except Exception as e:
logger.debug("progress callback failed: %s", e)
# ── Core Scanning ──

@ -84,8 +84,8 @@ def get_top_artists(database, image_url_fixer: ImageUrlFixer, time_range: str, l
artist['soul_id'] = row[4]
finally:
conn.close()
except Exception:
pass
except Exception as e:
logger.debug("top artists enrich failed: %s", e)
return artists
@ -114,8 +114,8 @@ def get_top_albums(database, image_url_fixer: ImageUrlFixer, time_range: str, li
album['artist_id'] = row[2]
finally:
conn.close()
except Exception:
pass
except Exception as e:
logger.debug("top albums enrich failed: %s", e)
return albums
@ -146,8 +146,8 @@ def get_top_tracks(database, image_url_fixer: ImageUrlFixer, time_range: str, li
track['artist_id'] = row[2]
finally:
conn.close()
except Exception:
pass
except Exception as e:
logger.debug("top tracks enrich failed: %s", e)
return tracks

@ -131,8 +131,8 @@ def read_file_tags(file_path: str) -> Dict[str, Any]:
result['replaygain_track_peak'] = rg.get('track_peak')
result['replaygain_album_gain'] = rg.get('album_gain')
result['replaygain_album_peak'] = rg.get('album_peak')
except Exception:
pass
except Exception as e:
logger.debug("read replaygain tags failed: %s", e)
return result

@ -215,8 +215,8 @@ def finalize_auto_wishlist_completion(
'tracks_found': str(tracks_added),
'tracks_failed': str(total_failed - tracks_added),
})
except Exception:
pass
except Exception as e:
logger.debug("emit wishlist_processing_completed failed: %s", e)
return completion_summary

@ -545,8 +545,8 @@ class PlaylistSyncService:
spotify_id, me.clean_title(original_title), me.clean_artist(artist_name),
active_server, db_track.id, db_track.title, confidence
)
except Exception:
pass
except Exception as e:
logger.debug("save sync match cache failed: %s", e)
# Fetch the actual track object from active media server using the database track ID
try:
