Revert "Fix race condition preventing failed tracks from being added to wishlist"

This reverts commit c0c05c7b89.
Branch: pull/122/head^2
Author: Broque Thomas (4 months ago)
Parent: ceafcdc49f
Commit: 375dcb8a19

@@ -11447,9 +11447,8 @@ def _process_failed_tracks_to_wishlist_exact(batch_id):
     with tasks_lock:
         if batch_id in download_batches:
             download_batches[batch_id]['wishlist_summary'] = completion_summary
-            download_batches[batch_id]['wishlist_processing_complete'] = True
             # Phase already set to 'complete' in _on_download_completed
     print(f"✅ [Wishlist Processing] Completed wishlist processing for batch {batch_id}")
     return completion_summary
@@ -11470,7 +11469,6 @@ def _process_failed_tracks_to_wishlist_exact(batch_id):
                     'total_failed': 0,
                     'error_message': str(e)
                 }
-                download_batches[batch_id]['wishlist_processing_complete'] = True
         except Exception as lock_error:
             print(f"❌ [Wishlist Processing] Failed to update batch after error: {lock_error}")
@@ -11565,7 +11563,7 @@ def _on_download_completed(batch_id, task_id, success=True):
     # Build track_info structure matching sync.py's permanently_failed_tracks format
     original_track_info = task.get('track_info', {})
     # Ensure spotify_track has proper structure for wishlist service
     spotify_track_data = _ensure_spotify_track_format(original_track_info)
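
The call to `_ensure_spotify_track_format` above normalizes `track_info` into the shape the wishlist service expects. The diff only shows the call site, so the body below is a hypothetical sketch of such a normalizer, not the project's actual implementation:

def _ensure_spotify_track_format(track_info):
    # Hypothetical sketch only: the diff shows just the call site, so the
    # field names below are illustrative, not the wishlist service's schema.
    spotify_track = track_info.get('spotify_track') or {}
    return {
        'id': spotify_track.get('id', track_info.get('id')),
        'name': spotify_track.get('name', track_info.get('name', '')),
        'artists': spotify_track.get('artists', track_info.get('artists', [])),
        'album': spotify_track.get('album', {}),
    }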
@@ -11719,10 +11717,7 @@ def _on_download_completed(batch_id, task_id, success=True):
             print(f"🎉 [Batch Manager] Batch {batch_id} complete - stopping monitor")
             download_monitor.stop_monitoring(batch_id)
-            # Mark that wishlist processing is starting (prevents premature cleanup)
-            batch['wishlist_processing_started'] = True
             # Process wishlist outside of the lock to prevent threading issues
             if is_auto_batch:
                 # For auto-initiated batches, handle completion and schedule next cycle
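
This hunk drops the `wishlist_processing_started` hand-off flag. The comments imply the pattern: mark the batch while still holding the lock, then do the slow wishlist work outside it. A sketch of that pattern, assuming the work runs on a background thread (the threading mechanism itself is not shown in the diff):

import threading

def _process_failed_tracks_to_wishlist_exact(batch_id):
    ...  # stub standing in for the real worker named in the diff

def hand_off_to_wishlist(batch, batch_id):
    # Assumed pattern: set the guard flag while tasks_lock is still held,
    # then run the slow wishlist work on its own thread, outside the lock.
    batch['wishlist_processing_started'] = True
    threading.Thread(target=_process_failed_tracks_to_wishlist_exact,
                     args=(batch_id,), daemon=True).start()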
@@ -13829,22 +13824,9 @@ def cleanup_batch():
     with tasks_lock:
         # Check if the batch exists before trying to delete
         if batch_id in download_batches:
-            batch = download_batches[batch_id]
-            # CRITICAL: Don't allow cleanup if wishlist processing is in progress
-            # This prevents a race condition where cleanup deletes the batch before
-            # the wishlist processing thread can access it
-            if batch.get('wishlist_processing_started') and not batch.get('wishlist_processing_complete'):
-                print(f"⏳ [Cleanup] Batch {batch_id} cleanup deferred - wishlist processing in progress")
-                return jsonify({
-                    "success": False,
-                    "error": "Batch cleanup deferred - wishlist processing in progress",
-                    "deferred": True
-                }), 202  # 202 = Accepted but not yet processed
             # Get the list of task IDs before deleting the batch
-            task_ids_to_remove = batch.get('queue', [])
+            task_ids_to_remove = download_batches[batch_id].get('queue', [])
             # Delete the batch record
             del download_batches[batch_id]
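
The guard removed above is the server half of the race-condition fix: cleanup refuses to delete a batch while the wishlist thread has started but not finished with it, answering 202 so the client retries. A condensed, self-contained sketch of that endpoint; the route decorator, the 404 branch, and the response fields outside the diff are assumptions:

import threading
from flask import Flask, jsonify, request

app = Flask(__name__)
tasks_lock = threading.Lock()
download_batches = {}  # stand-in for the app's real batch registry

@app.post('/api/playlists/cleanup_batch')
def cleanup_batch_sketch():
    # Condensed version of the reverted guard: defer deletion while the
    # wishlist thread has started but not yet completed for this batch.
    batch_id = (request.get_json(silent=True) or {}).get('batch_id')
    with tasks_lock:
        batch = download_batches.get(batch_id)
        if batch is None:
            return jsonify({"success": False, "error": "Unknown batch"}), 404
        if batch.get('wishlist_processing_started') and not batch.get('wishlist_processing_complete'):
            return jsonify({"success": False, "deferred": True}), 202  # retry later
        task_ids = batch.get('queue', [])
        del download_batches[batch_id]
    return jsonify({"success": True, "removed_tasks": len(task_ids)})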

@@ -5068,31 +5068,12 @@ async function cleanupDownloadProcess(playlistId) {
     if (process.batchId) {
         try {
             console.log(`🚀 Sending cleanup request to server for batch: ${process.batchId}`);
-            const response = await fetch('/api/playlists/cleanup_batch', {
+            await fetch('/api/playlists/cleanup_batch', {
                 method: 'POST',
                 headers: { 'Content-Type': 'application/json' },
                 body: JSON.stringify({ batch_id: process.batchId })
             });
-            // Handle deferred cleanup (202 = wishlist processing in progress)
-            if (response.status === 202) {
-                console.log(`⏳ Wishlist processing in progress for batch ${process.batchId}, will retry cleanup in 2s...`);
-                // Retry cleanup after delay to allow wishlist processing to complete
-                setTimeout(async () => {
-                    try {
-                        await fetch('/api/playlists/cleanup_batch', {
-                            method: 'POST',
-                            headers: { 'Content-Type': 'application/json' },
-                            body: JSON.stringify({ batch_id: process.batchId })
-                        });
-                        console.log(`✅ Delayed cleanup completed for batch: ${process.batchId}`);
-                    } catch (error) {
-                        console.warn(`⚠️ Delayed cleanup failed:`, error);
-                    }
-                }, 2000); // 2 second delay
-            } else {
-                console.log(`✅ Server cleanup completed for batch: ${process.batchId}`);
-            }
+            console.log(`✅ Server cleanup completed for batch: ${process.batchId}`);
        } catch (error) {
            console.warn(`⚠️ Failed to send cleanup request to server:`, error);
            // Don't show toast for cleanup failures - they're not user-facing
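
The client half of the protocol (removed above) retried the cleanup call after two seconds whenever the server answered 202. The same idea expressed in Python with `requests`, purely for illustration; in the app itself this logic lived in the browser JavaScript shown above:

import time
import requests

def cleanup_with_retry(base_url, batch_id, delay_s=2.0):
    # One deferred retry, mirroring the reverted JS: a 202 means the server
    # is still running wishlist processing and wants the call repeated.
    payload = {"batch_id": batch_id}
    resp = requests.post(f"{base_url}/api/playlists/cleanup_batch", json=payload)
    if resp.status_code == 202:
        time.sleep(delay_s)
        resp = requests.post(f"{base_url}/api/playlists/cleanup_batch", json=payload)
    return resp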
