persistent when closed

pull/15/head
Broque Thomas 9 months ago
parent 9f4e007818
commit 17533ae40b

@ -120,6 +120,57 @@ download_tasks = {} # task_id -> task state dict
download_batches = {} # batch_id -> {queue, active_count, max_concurrent}
tasks_lock = threading.Lock()
# --- Shared Transfer Data Cache ---
# Cache transfer data to avoid hammering the Soulseek API with multiple concurrent modals
# Keys: 'data' is a lookup dict keyed "username::basename(filename)" -> transfer dict,
# 'last_update' is an epoch timestamp of the last successful refresh,
# 'update_lock' serializes refreshes, 'cache_duration' is the freshness window (seconds).
transfer_data_cache = {
'data': {},
'last_update': 0,
'update_lock': threading.Lock(),
'cache_duration': 1.5 # Cache for 1.5 seconds to reduce API calls
}
def get_cached_transfer_data():
    """
    Get transfer data with caching to reduce API calls when multiple modals are active.
    Returns a lookup dictionary for efficient transfer matching, keyed
    "username::basename(filename)" -> transfer dict.

    NOTE(review): the slskd HTTP request runs while 'update_lock' is held, so
    concurrent callers block for the duration of the request — confirm this
    serialization is intended (it does prevent a thundering-herd refresh).
    """
    current_time = time.time()
    with transfer_data_cache['update_lock']:
        # Serve the cached lookup while it is still within the freshness window.
        if (current_time - transfer_data_cache['last_update']) < transfer_data_cache['cache_duration']:
            return transfer_data_cache['data']
        # Cache expired or empty, fetch new data
        live_transfers_lookup = {}
        try:
            transfers_data = asyncio.run(soulseek_client._make_request('GET', 'transfers/downloads'))
            if transfers_data:
                all_transfers = []
                # Flatten slskd's user -> directories -> files nesting into one list,
                # tagging each file with its owner so the composite key below works.
                for user_data in transfers_data:
                    username = user_data.get('username', 'Unknown')
                    if 'directories' in user_data:
                        for directory in user_data['directories']:
                            if 'files' in directory:
                                for file_info in directory['files']:
                                    file_info['username'] = username
                                    all_transfers.append(file_info)
                for transfer in all_transfers:
                    key = f"{transfer.get('username')}::{os.path.basename(transfer.get('filename', ''))}"
                    live_transfers_lookup[key] = transfer
            # Update cache only after a successful fetch.
            transfer_data_cache['data'] = live_transfers_lookup
            transfer_data_cache['last_update'] = current_time
        except Exception as e:
            print(f"⚠️ Could not fetch live transfers (cached): {e}")
            # Return empty dict on error, but don't update cache timestamp
            # This way we'll retry on the next request
            return {}
        return live_transfers_lookup
# --- Background Download Monitoring (GUI Parity) ---
class WebUIDownloadMonitor:
"""
@ -3353,6 +3404,93 @@ def _on_download_completed(batch_id, task_id, success=True):
print(f"🔄 [Batch Manager] Starting next batch for {batch_id}")
_start_next_batch_of_downloads(batch_id)
def _run_full_missing_tracks_process(batch_id, playlist_id, tracks_json):
    """
    A master worker that handles the entire missing tracks process:
    1. Runs the analysis (checks each track against the local music database).
    2. If missing tracks are found, it automatically queues them for download.

    Progress and results are published into download_batches[batch_id] under
    tasks_lock so the status endpoint can report them live.
    """
    try:
        # PHASE 1: ANALYSIS
        with tasks_lock:
            if batch_id in download_batches:
                download_batches[batch_id]['phase'] = 'analysis'
                download_batches[batch_id]['analysis_total'] = len(tracks_json)
                download_batches[batch_id]['analysis_processed'] = 0

        from database.music_database import MusicDatabase
        db = MusicDatabase()
        active_server = config_manager.get_active_media_server()
        analysis_results = []
        for i, track_data in enumerate(tracks_json):
            track_name = track_data.get('name', '')
            artists = track_data.get('artists', [])
            found, confidence = False, 0.0
            # Try each credited artist until one matches with enough confidence.
            for artist in artists:
                artist_name = artist if isinstance(artist, str) else str(artist)
                db_track, track_confidence = db.check_track_exists(
                    track_name, artist_name, confidence_threshold=0.7, server_source=active_server
                )
                if db_track and track_confidence >= 0.7:
                    found, confidence = True, track_confidence
                    break
            analysis_results.append({
                'track_index': i, 'track': track_data, 'found': found, 'confidence': confidence
            })
            with tasks_lock:
                if batch_id in download_batches:
                    download_batches[batch_id]['analysis_processed'] = i + 1
                    # Store incremental results for live updates
                    # (copy so readers never see a list we are still appending to)
                    download_batches[batch_id]['analysis_results'] = analysis_results.copy()

        missing_tracks = [res for res in analysis_results if not res['found']]
        with tasks_lock:
            if batch_id in download_batches:
                download_batches[batch_id]['analysis_results'] = analysis_results

        # PHASE 2: TRANSITION TO DOWNLOAD (if necessary)
        if not missing_tracks:
            print(f"✅ Analysis for batch {batch_id} complete. No missing tracks.")
            with tasks_lock:
                if batch_id in download_batches:
                    download_batches[batch_id]['phase'] = 'complete'
            return

        print(f" transitioning batch {batch_id} to download phase with {len(missing_tracks)} tracks.")
        with tasks_lock:
            # Batch may have been cancelled/removed while we were analyzing.
            if batch_id not in download_batches:
                return
            download_batches[batch_id]['phase'] = 'downloading'
            for res in missing_tracks:
                task_id = str(uuid.uuid4())
                download_tasks[task_id] = {
                    'status': 'pending', 'track_info': res['track'],
                    'playlist_id': playlist_id, 'batch_id': batch_id,
                    'track_index': res['track_index'], 'retry_count': 0,
                    'cached_candidates': [], 'used_sources': set(),
                    'status_change_time': time.time()
                }
                download_batches[batch_id]['queue'].append(task_id)

        download_monitor.start_monitoring(batch_id)
        _start_next_batch_of_downloads(batch_id)
    except Exception as e:
        print(f"❌ Master worker for batch {batch_id} failed: {e}")
        import traceback
        traceback.print_exc()
        with tasks_lock:
            if batch_id in download_batches:
                download_batches[batch_id]['phase'] = 'error'
                download_batches[batch_id]['error'] = str(e)
def _download_track_worker(task_id, batch_id=None):
"""
Enhanced download worker that matches the GUI's exact retry logic.
@ -3728,94 +3866,88 @@ def start_playlist_missing_downloads(playlist_id):
print(f"❌ Error starting missing downloads: {e}")
return jsonify({"success": False, "error": str(e)}), 500
@app.route('/api/active-processes', methods=['GET'])
def get_active_processes():
    """
    This endpoint now only needs to check for active download batches,
    as the analysis phase is now part of the batch process.

    Returns: JSON {"active_processes": [{type, playlist_id, playlist_name,
    batch_id, phase}, ...]} for every batch not in a terminal phase.
    """
    active_processes = []
    with tasks_lock:
        # Any phase other than complete/error/cancelled counts as active.
        for batch_id, batch_data in download_batches.items():
            if batch_data.get('phase') not in ['complete', 'error', 'cancelled']:
                active_processes.append({
                    "type": "batch",
                    "playlist_id": batch_data.get('playlist_id'),
                    "playlist_name": batch_data.get('playlist_name'),
                    "batch_id": batch_id,
                    "phase": batch_data.get('phase')
                })
    return jsonify({"active_processes": active_processes})
@app.route('/api/playlists/<batch_id>/download_status', methods=['GET'])
def get_batch_download_status(batch_id):
    """
    Returns real-time status for a batch, now including the
    current phase (analysis, downloading, etc.) and analysis progress.

    Response shape:
      {"phase": str, "error": str|None,
       "analysis_progress": {...}   (analysis phase only),
       "analysis_results": [...],
       "tasks": [...]               (downloading/complete/error phases)}
    """
    try:
        # Use cached transfer data to reduce API calls with multiple concurrent modals
        live_transfers_lookup = get_cached_transfer_data()

        # --- Process tasks and enrich with live data ---
        with tasks_lock:
            if batch_id not in download_batches:
                return jsonify({"error": "Batch not found"}), 404

            batch = download_batches[batch_id]
            response_data = {
                "phase": batch.get('phase', 'unknown'),
                "error": batch.get('error')
            }
            if response_data["phase"] == 'analysis':
                response_data['analysis_progress'] = {
                    'total': batch.get('analysis_total', 0),
                    'processed': batch.get('analysis_processed', 0)
                }
                response_data['analysis_results'] = batch.get('analysis_results', [])
            elif response_data["phase"] in ['downloading', 'complete', 'error']:
                response_data['analysis_results'] = batch.get('analysis_results', [])
                batch_tasks = []
                for task_id in batch.get('queue', []):
                    task = download_tasks.get(task_id)
                    if not task:
                        continue
                    task_status = {
                        'task_id': task_id,
                        'track_index': task['track_index'],
                        'status': task['status'],
                        'track_info': task['track_info'],
                        'progress': 0
                    }
                    # --- USE THE RELIABLE KEY FOR MATCHING ---
                    # Match against live slskd data via the "username::basename" key.
                    task_filename = task.get('filename') or task['track_info'].get('filename')
                    task_username = task.get('username') or task['track_info'].get('username')
                    if task_filename and task_username:
                        lookup_key = f"{task_username}::{os.path.basename(task_filename)}"
                        if lookup_key in live_transfers_lookup:
                            live_info = live_transfers_lookup[lookup_key]
                            # Translate slskd's live state string into our task status,
                            # so the UI sees completion instead of sticking at 100%.
                            state_str = live_info.get('state', 'Unknown')
                            if 'Completed' in state_str or 'Succeeded' in state_str:
                                task_status['status'] = 'completed'
                            elif 'Cancelled' in state_str or 'Canceled' in state_str:
                                task_status['status'] = 'cancelled'
                            elif 'Failed' in state_str or 'Errored' in state_str:
                                task_status['status'] = 'failed'
                            elif 'InProgress' in state_str:
                                task_status['status'] = 'downloading'
                            else:
                                task_status['status'] = 'queued'
                            task_status['progress'] = live_info.get('percentComplete', 0)
                    batch_tasks.append(task_status)
                batch_tasks.sort(key=lambda x: x['track_index'])
                response_data['tasks'] = batch_tasks
            return jsonify(response_data)
    except Exception as e:
        import traceback
        traceback.print_exc()
        print(f"❌ Error getting batch status: {e}")
        return jsonify({"error": str(e)}), 500
@app.route('/api/downloads/cancel_task', methods=['POST'])
@ -3917,159 +4049,86 @@ def cancel_download_task():
except Exception as e:
return jsonify({"success": False, "error": str(e)}), 500
# ===============================
# == TRACK ANALYSIS API ==
# ===============================
# Global state for track analysis tasks
# NOTE(review): this executor/tasks/lock trio backs the legacy /api/tracks/analyze
# flow; the unified batch process above no longer appears to use it — confirm
# whether it can be removed.
analysis_executor = ThreadPoolExecutor(max_workers=2, thread_name_prefix="AnalysisWorker")
analysis_tasks = {} # task_id -> analysis state
analysis_lock = threading.Lock()
def _run_track_analysis_task(task_id, tracks_json):
"""Run track analysis in background thread (same logic as GUI's PlaylistTrackAnalysisWorker)"""
import uuid
from database.music_database import MusicDatabase
from config.settings import config_manager
print(f"🔍 Starting track analysis task {task_id} for {len(tracks_json)} tracks")
@app.route('/api/playlists/<batch_id>/cancel_batch', methods=['POST'])
def cancel_batch(batch_id):
    """
    Cancels an entire batch - useful for cancelling during analysis phase
    or cancelling all downloads at once.

    Returns: JSON {"success": bool, "cancelled_tasks": int} or an error payload.
    """
    try:
        with tasks_lock:
            if batch_id not in download_batches:
                return jsonify({"success": False, "error": "Batch not found"}), 404

            # Mark batch as cancelled
            download_batches[batch_id]['phase'] = 'cancelled'

            # Cancel all individual tasks in the batch
            # (tasks already completed or cancelled are left untouched).
            cancelled_count = 0
            for task_id in download_batches[batch_id].get('queue', []):
                if task_id in download_tasks:
                    task = download_tasks[task_id]
                    if task['status'] not in ['completed', 'cancelled']:
                        task['status'] = 'cancelled'
                        cancelled_count += 1

        print(f"✅ Cancelled batch {batch_id} with {cancelled_count} tasks")
        return jsonify({"success": True, "cancelled_tasks": cancelled_count})
    except Exception as e:
        print(f"❌ Error cancelling batch {batch_id}: {e}")
        return jsonify({"success": False, "error": str(e)}), 500
# ===============================
# == UNIFIED MISSING TRACKS API ==
# ===============================
@app.route('/api/tracks/analyze', methods=['POST'])
def start_track_analysis():
"""Start track analysis to check which tracks exist in media server library"""
@app.route('/api/playlists/<playlist_id>/start-missing-process', methods=['POST'])
def start_missing_tracks_process(playlist_id):
    """
    A single, robust endpoint to kick off the entire missing tracks workflow.
    It creates a batch and starts the master worker in the background.

    Expects JSON {"tracks": [...], "playlist_name": str}; returns
    {"success": True, "batch_id": str} or 400/429 error payloads.
    """
    data = request.get_json()
    tracks = data.get('tracks', [])
    playlist_name = data.get('playlist_name', 'Unknown Playlist')
    if not tracks:
        return jsonify({"success": False, "error": "No tracks provided"}), 400

    # Limit concurrent analysis processes to prevent resource exhaustion.
    # The limit check and the batch creation happen under ONE lock acquisition
    # so two simultaneous requests cannot both slip past the limit.
    with tasks_lock:
        active_analysis_count = sum(1 for batch in download_batches.values()
                                    if batch.get('phase') == 'analysis')
        if active_analysis_count >= 3:  # Allow max 3 concurrent analysis processes
            return jsonify({
                "success": False,
                "error": "Too many analysis processes running. Please wait for one to complete."
            }), 429

        batch_id = str(uuid.uuid4())
        download_batches[batch_id] = {
            'phase': 'analysis',
            'playlist_id': playlist_id,
            'playlist_name': playlist_name,
            'queue': [],
            'active_count': 0,
            'max_concurrent': 3,
            'queue_index': 0,
            'analysis_total': len(tracks),
            'analysis_processed': 0,
            'analysis_results': []
        }

    # Hand the whole workflow (analysis + downloads) to the master worker.
    missing_download_executor.submit(_run_full_missing_tracks_process, batch_id, playlist_id, tracks)
    return jsonify({
        "success": True,
        "batch_id": batch_id
    })
@app.route('/api/tracks/analyze/status/<task_id>', methods=['GET'])
def get_analysis_status(task_id):
    """Get status of track analysis task (legacy /api/tracks/analyze flow)."""
    with analysis_lock:
        task = analysis_tasks.get(task_id)
    if not task:
        return jsonify({"error": "Task not found"}), 404
    return jsonify(task)
@app.route('/api/tracks/analyze/cancel/<task_id>', methods=['POST'])
def cancel_analysis_task(task_id):
    """Cancel a running analysis task (legacy /api/tracks/analyze flow)."""
    with analysis_lock:
        if task_id in analysis_tasks:
            # The worker polls this status flag and stops at the next track.
            analysis_tasks[task_id]['status'] = 'cancelled'
            return jsonify({"success": True, "message": "Task cancelled"})
        else:
            return jsonify({"success": False, "error": "Task not found"}), 404
@app.route('/api/tracks/download_missing', methods=['POST'])
def start_missing_downloads():
"""Legacy endpoint - redirect to new playlist-based endpoint"""

@ -1398,6 +1398,53 @@ async function loadSyncData() {
}
}
/**
 * Ask the backend for in-flight download batches and rehydrate a modal for
 * each one that has no local process entry yet (e.g. after a page reload).
 * Errors are logged and swallowed — this is a best-effort sync.
 */
async function checkForActiveProcesses() {
    try {
        const response = await fetch('/api/active-processes');
        if (!response.ok) return;
        const data = await response.json();
        const processes = data.active_processes || [];
        if (processes.length > 0) {
            console.log(`🔄 Found ${processes.length} active process(es) from backend. Rehydrating UI...`);
            for (const processInfo of processes) {
                // Only rehydrate processes we aren't already tracking locally.
                if (!activeDownloadProcesses[processInfo.playlist_id]) {
                    rehydrateModal(processInfo);
                }
            }
        }
    } catch (error) {
        console.error('Failed to check for active processes:', error);
    }
}
/**
 * Rebuild the local process state and hidden modal for a batch that the
 * backend reports as active (e.g. after a page reload).
 * Requires the playlist to already be present in spotifyPlaylists.
 */
async function rehydrateModal(processInfo) {
    const { playlist_id, playlist_name, batch_id } = processInfo;
    console.log(`💧 Rehydrating modal for playlist "${playlist_name}" (batch: ${batch_id})`);
    // Bail out if playlist metadata hasn't been loaded — the modal needs it.
    let playlistData = spotifyPlaylists.find(p => p.id === playlist_id);
    if (!playlistData) {
        console.warn(`Cannot rehydrate modal: Playlist data for ${playlist_id} not loaded.`);
        return;
    }
    await openDownloadMissingModal(playlist_id);
    const process = activeDownloadProcesses[playlist_id];
    if (!process) return;
    process.status = 'running';
    process.batchId = batch_id;
    updatePlaylistCardUI(playlist_id);
    updateRefreshButtonState();
    // Switch the modal controls into "in progress" mode and resume polling.
    document.getElementById(`begin-analysis-btn-${playlist_id}`).style.display = 'none';
    document.getElementById(`cancel-all-btn-${playlist_id}`).style.display = 'inline-block';
    startModalDownloadPolling(playlist_id);
    // Keep the rehydrated modal hidden until the user opens it.
    process.modalElement.style.display = 'none';
}
async function loadSpotifyPlaylists() {
const container = document.getElementById('spotify-playlist-container');
const refreshBtn = document.getElementById('spotify-refresh-btn');
@ -1415,6 +1462,9 @@ async function loadSpotifyPlaylists() {
spotifyPlaylists = await response.json();
renderSpotifyPlaylists();
spotifyPlaylistsLoaded = true;
await checkForActiveProcesses();
} catch (error) {
container.innerHTML = `<div class="playlist-placeholder">❌ Error: ${error.message}</div>`;
showToast(`Error loading playlists: ${error.message}`, 'error');
@ -1675,6 +1725,7 @@ async function openDownloadMissingModal(playlistId) {
// **NEW**: Check if a process is already active for this playlist
if (activeDownloadProcesses[playlistId]) {
console.log(`Modal for ${playlistId} already exists. Showing it.`);
closePlaylistDetailsModal(); // Close playlist details modal even when reusing existing modal
const process = activeDownloadProcesses[playlistId];
if (process.modalElement) {
process.modalElement.style.display = 'flex';
@ -1809,7 +1860,7 @@ async function openDownloadMissingModal(playlistId) {
<div class="download-missing-modal-footer">
<div class="download-phase-controls">
<button class="download-control-btn primary" id="begin-analysis-btn-${playlistId}" onclick="startTrackAnalysis('${playlistId}')">
<button class="download-control-btn primary" id="begin-analysis-btn-${playlistId}" onclick="startMissingTracksProcess('${playlistId}')">
Begin Analysis
</button>
<button class="download-control-btn danger" id="cancel-all-btn-${playlistId}" onclick="cancelAllOperations('${playlistId}')" style="display: none;">
@ -1850,80 +1901,45 @@ function closeDownloadMissingModal(playlistId) {
}
}
/**
 * Kick off the unified missing-tracks workflow (analysis + downloads) for a
 * playlist via the single backend endpoint, then start the status poller.
 */
async function startMissingTracksProcess(playlistId) {
    const process = activeDownloadProcesses[playlistId];
    if (!process) return;
    console.log(`🚀 Kicking off unified missing tracks process for playlist: ${playlistId}`);
    try {
        process.status = 'running';
        updatePlaylistCardUI(playlistId);
        updateRefreshButtonState();
        document.getElementById(`begin-analysis-btn-${playlistId}`).style.display = 'none';
        document.getElementById(`cancel-all-btn-${playlistId}`).style.display = 'inline-block';
        document.getElementById(`analysis-progress-text-${playlistId}`).textContent = 'Starting analysis...';
        const response = await fetch(`/api/playlists/${playlistId}/start-missing-process`, {
            method: 'POST',
            headers: { 'Content-Type': 'application/json' },
            body: JSON.stringify({
                tracks: process.tracks,
                playlist_name: process.playlist.name
            })
        });
        const data = await response.json();
        if (!data.success) {
            // Special handling for rate limit
            if (response.status === 429) {
                throw new Error(`${data.error} Try closing some other download processes first.`);
            }
            throw new Error(data.error);
        }
        process.batchId = data.batch_id;
        startModalDownloadPolling(playlistId);
    } catch (error) {
        showToast(`Failed to start process: ${error.message}`, 'error');
        process.status = 'cancelled';
        cleanupDownloadProcess(playlistId);
    }
}
// NOTE(review): legacy poller for the old /api/tracks/analyze flow — the unified
// process polls via startModalDownloadPolling instead; confirm this can be removed.
function startAnalysisPolling(playlistId) {
const process = activeDownloadProcesses[playlistId];
if (!process || !process.analysisTaskId) return;
const poller = setInterval(async () => {
// If process is gone, stop polling
if (!activeDownloadProcesses[playlistId]) {
clearInterval(poller);
return;
}
try {
const response = await fetch(`/api/tracks/analyze/status/${process.analysisTaskId}`);
const status = await response.json();
if (status.error) throw new Error(status.error);
// Mirror backend progress into the modal's progress bar/text.
document.getElementById(`analysis-progress-fill-${playlistId}`).style.width = `${status.progress || 0}%`;
document.getElementById(`analysis-progress-text-${playlistId}`).textContent =
`${status.processed || 0}/${status.total || 0} tracks analyzed (${status.progress || 0}%)`;
if (status.results) {
updateTrackAnalysisResults(playlistId, status.results);
}
if (status.status === 'complete') {
clearInterval(poller);
onAnalysisComplete(playlistId, status);
} else if (status.status === 'error' || status.status === 'cancelled') {
clearInterval(poller);
throw new Error(status.error || 'Analysis was cancelled');
}
} catch (error) {
// Any failure tears down the whole process (no retry).
clearInterval(poller);
showToast(`Analysis failed: ${error.message}`, 'error');
process.status = 'cancelled';
cleanupDownloadProcess(playlistId);
}
}, 1000);
}
function updateTrackAnalysisResults(playlistId, results) {
for (const result of results) {
@ -1935,67 +1951,11 @@ function updateTrackAnalysisResults(playlistId, results) {
}
}
// NOTE(review): part of the legacy analysis flow (called only by
// startAnalysisPolling) — confirm this can be removed with it.
function onAnalysisComplete(playlistId, status) {
const process = activeDownloadProcesses[playlistId];
if (!process) return;
document.getElementById(`stat-found-${playlistId}`).textContent = status.total_found || 0;
document.getElementById(`stat-missing-${playlistId}`).textContent = status.total_missing || 0;
document.getElementById(`analysis-progress-text-${playlistId}`).textContent = 'Analysis complete!';
process.analysisResults = status.results || [];
process.missingTracks = process.analysisResults.filter(r => !r.found);
if (process.missingTracks.length > 0) {
// Hand off to the download phase for whatever wasn't found locally.
initiateMissingDownloads(playlistId);
} else {
showToast('All tracks were found in your library!', 'success');
process.status = 'complete';
document.getElementById(`cancel-all-btn-${playlistId}`).style.display = 'none';
// Don't auto-close, let user close it.
}
}
// NOTE(review): part of the legacy analysis flow — the unified process queues
// downloads server-side; confirm this can be removed with startAnalysisPolling.
async function initiateMissingDownloads(playlistId) {
const process = activeDownloadProcesses[playlistId];
if (!process || process.missingTracks.length === 0) return;
try {
document.getElementById(`download-progress-text-${playlistId}`).textContent = 'Initiating downloads...';
// Pre-mark every missing track row as pending and give it a cancel button.
for (const result of process.missingTracks) {
const statusElement = document.getElementById(`download-${playlistId}-${result.track_index}`);
const actionsElement = document.getElementById(`actions-${playlistId}-${result.track_index}`);
if (statusElement) statusElement.textContent = '⏸️ Pending';
if (actionsElement) actionsElement.innerHTML = `<button class="cancel-track-btn" onclick="cancelTrackDownload('${playlistId}', ${result.track_index})">Cancel</button>`;
}
const response = await fetch(`/api/playlists/${playlistId}/download_missing`, {
method: 'POST',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify({
missing_tracks: process.missingTracks.map(r => ({ track: r.track, track_index: r.track_index }))
})
});
const data = await response.json();
if (!data.success) throw new Error(data.error);
process.batchId = data.batch_id;
startModalDownloadPolling(playlistId);
showToast(`Started downloads for ${process.missingTracks.length} tracks.`, 'success');
} catch (error) {
showToast(`Failed to start downloads: ${error.message}`, 'error');
process.status = 'cancelled';
cleanupDownloadProcess(playlistId);
}
}
/**
 * Poll the batch status endpoint every 2s and drive the modal UI through the
 * unified workflow phases: analysis progress, then per-track download status,
 * then completion. Transient fetch errors do NOT stop the poller.
 */
function startModalDownloadPolling(playlistId) {
    const process = activeDownloadProcesses[playlistId];
    if (!process || !process.batchId) return;
    if (process.poller) clearInterval(process.poller);
    process.poller = setInterval(async () => {
        // Stop polling once the local process entry has been cleaned up.
        if (!activeDownloadProcesses[playlistId]) {
            clearInterval(process.poller);
            return;
        }
        try {
            const response = await fetch(`/api/playlists/${process.batchId}/download_status`);
            const data = await response.json();
            if (data.error) throw new Error(data.error);

            if (data.phase === 'analysis') {
                const progress = data.analysis_progress;
                const percent = progress.total > 0 ? (progress.processed / progress.total) * 100 : 0;
                document.getElementById(`analysis-progress-fill-${playlistId}`).style.width = `${percent}%`;
                document.getElementById(`analysis-progress-text-${playlistId}`).textContent =
                    `${progress.processed}/${progress.total} tracks analyzed`;
                if (data.analysis_results) {
                    updateTrackAnalysisResults(playlistId, data.analysis_results);
                    // Update stats when we first get analysis results
                    const foundCount = data.analysis_results.filter(r => r.found).length;
                    const missingCount = data.analysis_results.filter(r => !r.found).length;
                    document.getElementById(`stat-found-${playlistId}`).textContent = foundCount;
                    document.getElementById(`stat-missing-${playlistId}`).textContent = missingCount;
                }
            } else if (data.phase === 'downloading' || data.phase === 'complete' || data.phase === 'error') {
                // First time we land past analysis (e.g. after a rehydrate),
                // backfill the analysis UI to its finished state.
                if (document.getElementById(`analysis-progress-fill-${playlistId}`).style.width !== '100%') {
                    document.getElementById(`analysis-progress-fill-${playlistId}`).style.width = '100%';
                    document.getElementById(`analysis-progress-text-${playlistId}`).textContent = 'Analysis complete!';
                    if (data.analysis_results) {
                        updateTrackAnalysisResults(playlistId, data.analysis_results);
                        const foundCount = data.analysis_results.filter(r => r.found).length;
                        const missingCount = data.analysis_results.filter(r => !r.found).length;
                        document.getElementById(`stat-found-${playlistId}`).textContent = foundCount;
                        document.getElementById(`stat-missing-${playlistId}`).textContent = missingCount;
                    }
                }
                const missingTracks = (data.analysis_results || []).filter(r => !r.found);
                const missingCount = missingTracks.length;
                let completedCount = 0;
                let failedOrCancelledCount = 0;
                (data.tasks || []).forEach(task => {
                    const row = document.querySelector(`#download-missing-modal-${playlistId} tr[data-track-index="${task.track_index}"]`);
                    if (!row) return;
                    // Rows cancelled locally keep their UI state regardless of backend status.
                    if (row.dataset.locallyCancelled === 'true') {
                        failedOrCancelledCount++;
                        return;
                    }
                    row.dataset.taskId = task.task_id;
                    const statusEl = document.getElementById(`download-${playlistId}-${task.track_index}`);
                    const actionsEl = document.getElementById(`actions-${playlistId}-${task.track_index}`);
                    let statusText = '';
                    switch (task.status) {
                        case 'pending': statusText = '⏸️ Pending'; break;
                        case 'searching': statusText = '🔍 Searching...'; break;
                        case 'downloading': statusText = `⏬ Downloading... ${Math.round(task.progress || 0)}%`; break;
                        case 'completed': statusText = '✅ Completed'; completedCount++; break;
                        case 'failed': statusText = '❌ Failed'; failedOrCancelledCount++; break;
                        case 'cancelled': statusText = '🚫 Cancelled'; failedOrCancelledCount++; break;
                        default: statusText = `${task.status}`; break;
                    }
                    if (statusEl) statusEl.textContent = statusText;
                    if (actionsEl && ['completed', 'failed', 'cancelled'].includes(task.status)) {
                        actionsEl.innerHTML = '-';
                    }
                });
                const totalFinished = completedCount + failedOrCancelledCount;
                const progressPercent = missingCount > 0 ? (totalFinished / missingCount) * 100 : 0;
                document.getElementById(`download-progress-fill-${playlistId}`).style.width = `${progressPercent}%`;
                document.getElementById(`download-progress-text-${playlistId}`).textContent = `${completedCount}/${missingCount} completed (${progressPercent.toFixed(0)}%)`;
                document.getElementById(`stat-downloaded-${playlistId}`).textContent = completedCount;
                if (data.phase === 'complete' || data.phase === 'error' || (missingCount > 0 && totalFinished >= missingCount)) {
                    process.status = 'complete';
                    showToast(`Process complete for ${process.playlist.name}!`, 'success');
                    document.getElementById(`cancel-all-btn-${playlistId}`).style.display = 'none';
                    clearInterval(process.poller);
                    process.poller = null;
                    updatePlaylistCardUI(playlistId);
                }
            }
        } catch (error) {
            console.error(`Polling error for ${playlistId}:`, error);
            // Don't stop polling on transient errors
        }
    }, 2000);
}
/**
 * Cancel everything for a playlist's process: the whole backend batch (if one
 * exists), then each visible task row individually for immediate UI feedback,
 * and finally tear down the local process state.
 */
async function cancelAllOperations(playlistId) {
    const process = activeDownloadProcesses[playlistId];
    if (!process) return;
    console.log(`🚫 Cancelling all operations for playlist ${playlistId}`);
    try {
        // First, try to cancel the entire batch if we have a batch ID
        if (process.batchId) {
            try {
                const batchResponse = await fetch(`/api/playlists/${process.batchId}/cancel_batch`, {
                    method: 'POST'
                });
                const batchData = await batchResponse.json();
                if (batchData.success) {
                    console.log(`✅ Cancelled batch ${process.batchId} with ${batchData.cancelled_tasks} tasks`);
                }
            } catch (error) {
                console.warn('Failed to cancel batch, falling back to individual cancellation:', error);
            }
        }
        // Also cancel individual download tasks for immediate UI feedback
        const modal = document.getElementById(`download-missing-modal-${playlistId}`);
        if (modal) {
            const taskRows = modal.querySelectorAll('tr[data-task-id]');
            const cancellationPromises = [];
            taskRows.forEach(row => {
                const taskId = row.dataset.taskId;
                const trackIndex = row.dataset.trackIndex;
                if (taskId && row.dataset.locallyCancelled !== 'true') {
                    // Mark as locally cancelled for immediate UI feedback
                    row.dataset.locallyCancelled = 'true';
                    const statusElement = document.getElementById(`download-${playlistId}-${trackIndex}`);
                    const actionsElement = document.getElementById(`actions-${playlistId}-${trackIndex}`);
                    if (statusElement) statusElement.textContent = '🚫 Cancelled';
                    if (actionsElement) actionsElement.innerHTML = '-';
                    // Fire the backend cancellation; individual failures are only warned.
                    cancellationPromises.push(
                        fetch('/api/downloads/cancel_task', {
                            method: 'POST',
                            headers: { 'Content-Type': 'application/json' },
                            body: JSON.stringify({ task_id: taskId })
                        }).catch(error => {
                            console.warn(`Failed to cancel task ${taskId}:`, error);
                        })
                    );
                }
            });
            // Wait for all cancellations to complete
            if (cancellationPromises.length > 0) {
                await Promise.allSettled(cancellationPromises);
                console.log(`✅ Cancelled ${cancellationPromises.length} individual download tasks`);
            }
        }
        process.status = 'cancelled';
        cleanupDownloadProcess(playlistId);
        showToast('All operations cancelled', 'info');
    } catch (error) {
        console.error('Error during cancellation:', error);
        process.status = 'cancelled';
        cleanupDownloadProcess(playlistId);
        showToast('Operations cancelled (with errors)', 'warning');
    }
}
function resetToInitialState() {
@ -3560,7 +3601,7 @@ window.matchedDownloadAlbumTrack = matchedDownloadAlbumTrack;
// Download Missing Tracks Modal functions
// Exposed on window so inline onclick handlers in generated HTML can reach them.
window.openDownloadMissingModal = openDownloadMissingModal;
window.closeDownloadMissingModal = closeDownloadMissingModal;
window.startMissingTracksProcess = startMissingTracksProcess;
window.cancelAllOperations = cancelAllOperations;
window.cancelTrackDownload = cancelTrackDownload;
window.handleViewProgressClick = handleViewProgressClick;

Loading…
Cancel
Save