Compare commits
3 commits: 9e3cb6ff64 ... 47e3d881a1

| Author | SHA1 | Date |
|---|---|---|
| | 47e3d881a1 | |
| | f5024383bf | |
| | b042805d46 | |
```diff
@@ -1,5 +1,5 @@
-docker build -t 192.168.2.212:3000/tigeren/metube:1.5 .
+docker build -t 192.168.2.212:3000/tigeren/metube:1.7 .
 
-docker push 192.168.2.212:3000/tigeren/metube:1.5
+docker push 192.168.2.212:3000/tigeren/metube:1.7
 
 docker compose up -d --build --force-recreate
```
app/main.py (14 changed lines)
```diff
@@ -157,6 +157,10 @@ class Notifier(DownloadQueueNotifier):
         log.info(f"Notifier: Download cleared - {id}")
         await sio.emit('cleared', serializer.encode(id))
 
+    async def event(self, event):
+        log.info(f"Notifier: Event - {event['type']}")
+        await sio.emit('event', serializer.encode(event))
+
 dqueue = DownloadQueue(config, Notifier())
 app.on_startup.append(lambda app: dqueue.initialize())
```
```diff
@@ -433,6 +437,16 @@ def version(request):
         "version": os.getenv("METUBE_VERSION", "dev")
     })
 
+@routes.get(config.URL_PREFIX + 'events')
+def get_events(request):
+    events = dqueue.get_events()
+    return web.Response(text=serializer.encode(events))
+
+@routes.post(config.URL_PREFIX + 'events/clear')
+async def clear_events(request):
+    dqueue.clear_events()
+    return web.Response(text=serializer.encode({'status': 'ok'}))
+
 if config.URL_PREFIX != '/':
     @routes.get('/')
     def index_redirect_root(request):
```
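The two routes above are plain aiohttp handlers, so the event feed can be exercised with any HTTP client. A minimal sketch, assuming the server listens on http://localhost:8081, URL_PREFIX is '/', and the serializer emits JSON (all three are assumptions about the deployment):

```python
import json
import urllib.request

BASE = "http://localhost:8081"  # assumed host/port for a local MeTube instance

# GET /events - fetch the last few queue events
with urllib.request.urlopen(f"{BASE}/events") as resp:
    for ev in json.loads(resp.read()):
        print(ev["type"], "-", ev["message"], ev.get("url", ""))

# POST /events/clear - empty the event list
req = urllib.request.Request(f"{BASE}/events/clear", data=b"", method="POST")
with urllib.request.urlopen(req) as resp:
    print(json.loads(resp.read()))  # expected: {'status': 'ok'}
```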
app/ytdl.py (348 changed lines)
```diff
@@ -7,6 +7,8 @@ import asyncio
 import multiprocessing
 import logging
 import re
+import random
+import string
 from urllib.parse import urlparse
 
 import yt_dlp.networking.impersonate
```
```diff
@@ -31,6 +33,9 @@ class DownloadQueueNotifier:
     async def cleared(self, id):
         raise NotImplementedError
 
+    async def event(self, event):
+        raise NotImplementedError
+
 class DownloadInfo:
     def __init__(self, id, title, url, quality, format, folder, custom_name_prefix, error, entry, playlist_item_limit):
         self.id = id if len(custom_name_prefix) == 0 else f'{custom_name_prefix}.{id}'
```
```diff
@@ -71,11 +76,32 @@ class Download:
         self.proc = None
         self.loop = None
         self.notifier = None
+        self.had_download = False  # Track if actual download occurred
 
     def _download(self):
+        log.info(f"Starting download for: {self.info.title} ({self.info.url})")
+        log.info(f"[TRACE] Download config: download_dir={self.download_dir}, temp_dir={self.temp_dir}")
+        log.info(f"[TRACE] Output template: {self.output_template}")
         try:
             def put_status(st):
+                # Log every status update to trace the flow
+                status_type = st.get('status', 'unknown')
+                if status_type == 'downloading':
+                    # Mark that we're actually downloading (not skipping)
+                    self.had_download = True
+                    if 'tmpfilename' in st:
+                        log.debug(f"[TRACE] Downloading - tmpfile: {st.get('tmpfilename')}")
+                elif status_type == 'finished':
+                    log.info(f"[TRACE] put_status FINISHED - filename: {st.get('filename')}, tmpfilename: {st.get('tmpfilename')}")
+                    log.info(f"[TRACE] had_download flag: {self.had_download}")
+                    if st.get('filename'):
+                        exists = os.path.exists(st['filename'])
+                        log.info(f"[TRACE] File exists at reported location? {exists}")
+                        if exists:
+                            log.info(f"[TRACE] File size: {os.path.getsize(st['filename'])} bytes")
+                elif status_type == 'error':
+                    log.error(f"[TRACE] put_status ERROR - msg: {st.get('msg')}")
+
                 self.status_queue.put({k: v for k, v in st.items() if k in (
                     'tmpfilename',
                     'filename',
```
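For orientation, put_status has the shape of a yt-dlp progress hook: yt-dlp calls each registered hook with a dict whose 'status' is 'downloading', 'finished', or 'error', plus keys such as 'filename' and 'tmpfilename'. A self-contained sketch of that contract (the URL is a placeholder):

```python
import yt_dlp

def hook(st):
    # Called by yt-dlp on every progress tick
    if st["status"] == "downloading":
        print("downloading ->", st.get("tmpfilename"))
    elif st["status"] == "finished":
        print("finished ->", st.get("filename"))

with yt_dlp.YoutubeDL({"quiet": True, "progress_hooks": [hook]}) as ydl:
    ydl.download(["https://www.youtube.com/watch?v=placeholder"])  # placeholder URL
```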
```diff
@@ -89,12 +115,87 @@ class Download:
                 )})
 
             def put_status_postprocessor(d):
+                log.info(f"[TRACE] ===== POSTPROCESSOR CALLED =====")
+                log.info(f"[TRACE] Postprocessor: {d.get('postprocessor')}, Status: {d.get('status')}")
+
                 if d['postprocessor'] == 'MoveFiles' and d['status'] == 'finished':
+                    log.info(f"[TRACE] MoveFiles postprocessor triggered")
+                    log.info(f"[TRACE] had_download flag in postprocessor: {self.had_download}")
+                    log.info(f"[TRACE] info_dict keys: {list(d['info_dict'].keys())}")
+                    log.info(f"[TRACE] info_dict filepath: {d['info_dict'].get('filepath')}")
+                    log.info(f"[TRACE] info_dict __finaldir: {d['info_dict'].get('__finaldir')}")
+
                     if '__finaldir' in d['info_dict']:
                         filename = os.path.join(d['info_dict']['__finaldir'], os.path.basename(d['info_dict']['filepath']))
                     else:
                         filename = d['info_dict']['filepath']
-                    self.status_queue.put({'status': 'finished', 'filename': filename})
+
+                    log.info(f"[TRACE] Resolved filename: {filename}")
+                    log.info(f"[TRACE] File exists? {os.path.exists(filename)}")
+
+                    # List files in directory
+                    dir_name = os.path.dirname(filename)
+                    if os.path.isdir(dir_name):
+                        all_files = os.listdir(dir_name)
+                        log.info(f"[TRACE] Files in {dir_name}: {all_files}")
+
+                    # Check if file exists at expected location
+                    if os.path.exists(filename):
+                        log.info(f"[TRACE] File FOUND at expected location")
+
+                        # If yt-dlp didn't actually download (skipped), just report the existing file
+                        if not self.had_download:
+                            log.info(f"[TRACE] No actual download occurred - yt-dlp reused existing file")
+                            log.info(f"[TRACE] Sending status with existing filename: {filename}")
+                            self.status_queue.put({'status': 'finished', 'filename': filename})
+                        else:
+                            # Actual download happened - check for conflicts
+                            log.info(f"[TRACE] Actual download occurred - checking for conflicts")
+                            base_name = os.path.basename(filename)
+                            name, ext = os.path.splitext(base_name)
+
+                            # Look for other files with same base name (excluding current file)
+                            other_files = []
+                            if os.path.isdir(dir_name):
+                                for existing_file in os.listdir(dir_name):
+                                    if existing_file == base_name:
+                                        log.debug(f"[TRACE] Skipping current file: {existing_file}")
+                                        continue  # Skip the current file
+                                    existing_name, existing_ext = os.path.splitext(existing_file)
+                                    # Check for exact name match
+                                    if existing_ext == ext and existing_name == name:
+                                        log.info(f"[TRACE] Found matching file: {existing_file}")
+                                        other_files.append(existing_file)
+
+                            log.info(f"[TRACE] Found {len(other_files)} other files with same base name: {other_files}")
+
+                            # If other files exist with same name, we have a duplicate - rename the NEW file
+                            if len(other_files) > 0:
+                                log.info(f"[TRACE] CONFLICT DETECTED! Other files: {other_files}")
+                                unique_id = ''.join(random.choices(string.ascii_lowercase + string.digits, k=5))
+                                new_filename = f"{name}_{unique_id}{ext}"
+                                new_filepath = os.path.join(dir_name, new_filename)
+
+                                log.info(f"[TRACE] Attempting rename: {filename} -> {new_filepath}")
+                                try:
+                                    os.rename(filename, new_filepath)
+                                    log.warning(f"Filename conflict detected. Renamed: {base_name} → {new_filename}")
+                                    log.info(f"[TRACE] Rename successful")
+                                    filename = new_filepath
+                                except Exception as e:
+                                    log.error(f"[TRACE] Rename FAILED: {e}")
+                                    log.error(f"Failed to rename file due to conflict: {e}")
+                            else:
+                                log.info(f"[TRACE] No conflict - this is the only file with this name")
+
+                            log.info(f"[TRACE] Sending status with filename: {filename}")
+                            self.status_queue.put({'status': 'finished', 'filename': filename})
+                    else:
+                        log.info(f"[TRACE] File NOT FOUND at expected location")
+                        base_name = os.path.basename(filename)
+                        self.status_queue.put({'status': 'error', 'msg': f'File not found: {base_name}'})
+                else:
+                    log.debug(f"[TRACE] Other postprocessor: {d.get('postprocessor')}")
 
             ret = yt_dlp.YoutubeDL(params={
                 'quiet': True,
```
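The conflict branch above reduces to: if the finished file's name collides, move it aside under a short random suffix. The same naming scheme as a standalone helper (a sketch of the idea, not the committed function; it checks os.path.exists instead of scanning for same-named siblings):

```python
import os
import random
import string

def unique_path(filepath: str) -> str:
    """Return filepath if it is free, else a variant with a 5-char random suffix."""
    if not os.path.exists(filepath):
        return filepath
    dir_name, base_name = os.path.split(filepath)
    name, ext = os.path.splitext(base_name)
    suffix = ''.join(random.choices(string.ascii_lowercase + string.digits, k=5))
    return os.path.join(dir_name, f"{name}_{suffix}{ext}")

# e.g. os.rename(tmp_path, unique_path('/downloads/video.mp4'))
```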
```diff
@@ -128,6 +229,22 @@ class Download:
         asyncio.create_task(self.update_status())
         return await self.loop.run_in_executor(None, self.proc.join)
 
+    def _resolve_filename_conflict(self, filepath):
+        """
+        Resolve filename conflicts by appending a short unique ID.
+        Returns the final non-conflicting filepath.
+        """
+        dir_name = os.path.dirname(filepath)
+        base_name = os.path.basename(filepath)
+        name, ext = os.path.splitext(base_name)
+
+        # Generate a short unique ID (5 alphanumeric characters)
+        unique_id = ''.join(random.choices(string.ascii_lowercase + string.digits, k=5))
+        new_filename = f"{name}_{unique_id}{ext}"
+        new_filepath = os.path.join(dir_name, new_filename)
+
+        return new_filepath
+
     def cancel(self):
         log.info(f"Cancelling download: {self.info.title}")
         if self.running():
```
```diff
@@ -250,6 +367,15 @@ class DownloadQueue:
         elif self.config.DOWNLOAD_MODE == 'limited':
             self.semaphore = asyncio.Semaphore(int(self.config.MAX_CONCURRENT_DOWNLOADS))
 
+        # PreCheck queue for sequential conflict detection (no locks needed)
+        self.precheck_queue = asyncio.Queue()
+        self.reserved_filenames = set()  # Track filenames being processed
+        self.precheck_in_progress = {}  # Track URL -> DownloadInfo for items in precheck queue
+
+        # Event notifications (keep last 5 in memory)
+        self.events = []  # List of {type, message, timestamp, url}
+        self.max_events = 5
+
         self.done.load()
 
     async def __import_queue(self):
```
```diff
@@ -262,9 +388,154 @@ class DownloadQueue:
 
     async def initialize(self):
         log.info("Initializing DownloadQueue")
+        # Start the precheck worker for sequential conflict detection
+        asyncio.create_task(self.__precheck_worker())
         asyncio.create_task(self.__import_queue())
         asyncio.create_task(self.__import_pending())
 
+    async def __precheck_worker(self):
+        """Background worker that processes precheck queue sequentially.
+        Sequential processing naturally prevents race conditions without locks."""
+        log.info("[PreCheck] Worker started")
+        while True:
+            try:
+                # Get next item from queue (blocks if empty)
+                item = await self.precheck_queue.get()
+                log.debug(f"[PreCheck] Processing item: {item['dl'].url}")
+
+                # Process the precheck and start download
+                await self.__process_precheck(item)
+
+                # Mark task as done
+                self.precheck_queue.task_done()
+            except Exception as e:
+                log.error(f"[PreCheck] Worker error: {e}", exc_info=True)
+
+    async def __process_precheck(self, item):
+        """Process a single download with conflict detection.
+        Called sequentially by worker - no race conditions possible."""
+        dl = item['dl']
+        auto_start = item['auto_start']
+        dldirectory = item['dldirectory']
+        output = item['output']
+        output_chapter = item['output_chapter']
+        ytdl_options = item['ytdl_options']
+        entry = item['entry']
+
+        log.info(f"[PreCheck] Checking for filename conflicts before download")
+        log.debug(f"[PreCheck] Original output template: {output}")
+
+        # Try to predict the filename that yt-dlp will generate
+        if entry and 'title' in entry:
+            # Check if we have the real title or just a placeholder
+            title = entry.get('title', '')
+            video_id = entry.get('id', '')
+
+            # If title looks like a placeholder (contains the ID), we need full extraction
+            needs_full_extraction = (
+                not title or  # No title
+                title == f"twitter video #{video_id}" or  # Placeholder pattern
+                video_id in title  # ID is in title (likely placeholder)
+            )
+
+            if needs_full_extraction:
+                log.debug(f"[PreCheck] Title appears to be placeholder: '{title}', doing full info extraction")
+                try:
+                    # Do a full (non-flat) extraction to get real title
+                    full_entry = await asyncio.get_running_loop().run_in_executor(
+                        None,
+                        lambda: yt_dlp.YoutubeDL(params={
+                            'quiet': True,
+                            'no_color': True,
+                            'extract_flat': False,  # Full extraction
+                            'skip_download': True,  # Don't download, just get info
+                            'paths': {"home": dldirectory, "temp": self.config.TEMP_DIR},
+                            **ytdl_options,
+                        }).extract_info(dl.url, download=False)
+                    )
+                    if full_entry and 'title' in full_entry:
+                        title = full_entry['title']
+                        log.debug(f"[PreCheck] Got real title from full extraction: '{title}'")
+                except Exception as e:
+                    log.warning(f"[PreCheck] Failed to get full info: {e}, using placeholder title")
+
+            predicted_filename = output
+            # Replace title
+            if '%(title)s' in predicted_filename:
+                predicted_filename = predicted_filename.replace('%(title)s', title)
+
+            # Replace id
+            if '%(id)s' in predicted_filename and video_id:
+                predicted_filename = predicted_filename.replace('%(id)s', video_id)
+
+            # Handle ext specially - default to format's extension if not in entry
+            if '%(ext)s' in predicted_filename:
+                ext = entry.get('ext', dl.format if dl.format in ['mp4', 'mkv', 'webm', 'mp3', 'm4a'] else 'mp4')
+                predicted_filename = predicted_filename.replace('%(ext)s', ext)
+
+            predicted_filepath = os.path.join(dldirectory, predicted_filename)
+            log.info(f"[PreCheck] Predicted filepath: {predicted_filepath}")
+
+            # Check if file already exists OR is reserved by another download in queue
+            # Sequential processing means we check one at a time - no race condition
+            if os.path.exists(predicted_filepath) or predicted_filepath in self.reserved_filenames:
+                if predicted_filepath in self.reserved_filenames:
+                    log.warning(f"[PreCheck] Filename is reserved by pending download! Will append unique ID")
+                else:
+                    log.warning(f"[PreCheck] File already exists! Will append unique ID to avoid conflict")
+
+                # Generate unique ID
+                unique_id = ''.join(random.choices(string.ascii_lowercase + string.digits, k=5))
+
+                # Modify output template to include unique ID before extension
+                # Change "%(title)s.%(ext)s" to "%(title)s_XXXXX.%(ext)s"
+                if '.%(ext)s' in output:
+                    output = output.replace('.%(ext)s', f'_{unique_id}.%(ext)s')
+                else:
+                    # Fallback: append to end
+                    output = f"{output}_{unique_id}"
+
+                # Re-predict the new filename
+                predicted_filename = output
+                if '%(title)s' in predicted_filename:
+                    predicted_filename = predicted_filename.replace('%(title)s', title)
+                if '%(id)s' in predicted_filename and video_id:
+                    predicted_filename = predicted_filename.replace('%(id)s', video_id)
+                if '%(ext)s' in predicted_filename:
+                    ext = entry.get('ext', dl.format if dl.format in ['mp4', 'mkv', 'webm', 'mp3', 'm4a'] else 'mp4')
+                    predicted_filename = predicted_filename.replace('%(ext)s', ext)
+                predicted_filepath = os.path.join(dldirectory, predicted_filename)
+
+                log.info(f"[PreCheck] Modified output template: {output}")
+                log.info(f"[PreCheck] New predicted filepath: {predicted_filepath}")
+            else:
+                log.info(f"[PreCheck] No conflict detected, using original template")
+
+            # Reserve this filename to prevent concurrent downloads from using it
+            self.reserved_filenames.add(predicted_filepath)
+            log.debug(f"[PreCheck] Reserved filename: {predicted_filepath}")
+        else:
+            predicted_filepath = None
+            log.debug(f"[PreCheck] No entry data available, skipping pre-check")
+
+        log.debug(f"final resolved output template: {output}")
+        download = Download(dldirectory, self.config.TEMP_DIR, output, output_chapter, dl.quality, dl.format, ytdl_options, dl)
+
+        # Store the reserved filepath for cleanup
+        download.reserved_filepath = predicted_filepath
+
+        # Remove from in-progress set before adding to queue
+        # This allows checking queue.exists() to work properly
+        if dl.url in self.precheck_in_progress:
+            del self.precheck_in_progress[dl.url]
+            log.debug(f"[PreCheck] Removed from in-progress tracking: {dl.url}")
+
+        if auto_start is True:
+            self.queue.put(download)
+            asyncio.create_task(self.__start_download(download))
+        else:
+            self.pending.put(download)
+
     async def __start_download(self, download):
         if download.canceled:
             log.info(f"Download {download.info.title} was canceled, skipping start.")
```
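The key design point in the block above is the single-consumer asyncio.Queue: exactly one coroutine ever performs the check-then-reserve on reserved_filenames, so no lock is required. The pattern in isolation, with illustrative names rather than the committed ones:

```python
import asyncio

async def worker(queue: asyncio.Queue, reserved: set) -> None:
    # Sole consumer: items are handled strictly one at a time, so the
    # check-then-reserve below can never interleave with another item.
    while True:
        path = await queue.get()
        if path in reserved:
            path = path + ".alt"  # stand-in for the random-suffix rename
        reserved.add(path)
        print("reserved:", path)
        queue.task_done()

async def main() -> None:
    queue: asyncio.Queue = asyncio.Queue()
    reserved: set = set()
    asyncio.create_task(worker(queue, reserved))
    await queue.put("video.mp4")
    await queue.put("video.mp4")  # collides; gets the suffix
    await queue.join()

asyncio.run(main())
```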
```diff
@@ -296,6 +567,12 @@ class DownloadQueue:
         self._post_download_cleanup(download)
 
     def _post_download_cleanup(self, download):
+        # Release filename reservation if it exists
+        if hasattr(download, 'reserved_filepath') and download.reserved_filepath:
+            if download.reserved_filepath in self.reserved_filenames:
+                self.reserved_filenames.discard(download.reserved_filepath)
+                log.debug(f"[PreCheck] Released reservation for: {download.reserved_filepath}")
+
         if download.info.status != 'finished':
             if download.tmpfilename and os.path.isfile(download.tmpfilename):
                 try:
```
```diff
@@ -342,18 +619,34 @@ class DownloadQueue:
         return dldirectory, None
 
     async def __add_download(self, dl, auto_start):
+        """Fast path: validate and queue for precheck processing.
+        Returns immediately without blocking on slow operations."""
+        # Check if this exact URL is already being processed, in queue, or already downloaded
+        # This prevents duplicate downloads when same URL is submitted multiple times
+        if (dl.url in self.precheck_in_progress or
+                self.queue.exists(dl.url) or
+                self.pending.exists(dl.url) or
+                self.done.exists(dl.url)):
+            log.info(f"[PreCheck] URL already queued/processing/downloaded, skipping: {dl.url}")
+            # Add event notification
+            self._add_event('duplicate_skipped', 'URL already in queue or downloaded', dl.url)
+            return {'status': 'ok', 'msg': 'Download already exists'}
+
         dldirectory, error_message = self.__calc_download_path(dl.quality, dl.format, dl.folder)
         if error_message is not None:
             return error_message
 
         output = self.config.OUTPUT_TEMPLATE if len(dl.custom_name_prefix) == 0 else f'{dl.custom_name_prefix}.{self.config.OUTPUT_TEMPLATE}'
         output_chapter = self.config.OUTPUT_TEMPLATE_CHAPTER
         entry = getattr(dl, 'entry', None)
 
         if entry is not None and 'playlist' in entry and entry['playlist'] is not None:
             if len(self.config.OUTPUT_TEMPLATE_PLAYLIST):
                 output = self.config.OUTPUT_TEMPLATE_PLAYLIST
             for property, value in entry.items():
                 if property.startswith("playlist"):
                     output = output.replace(f"%({property})s", str(value))
 
         ytdl_options = dict(self.config.YTDL_OPTIONS)
         playlist_item_limit = getattr(dl, 'playlist_item_limit', 0)
         if playlist_item_limit > 0:
```
```diff
@@ -404,12 +697,23 @@ class DownloadQueue:
         else:
             log.debug(f"[Cookie] Cookies directory does not exist")
 
-        download = Download(dldirectory, self.config.TEMP_DIR, output, output_chapter, dl.quality, dl.format, ytdl_options, dl)
-        if auto_start is True:
-            self.queue.put(download)
-            asyncio.create_task(self.__start_download(download))
-        else:
-            self.pending.put(download)
+        # Mark URL as being processed to prevent duplicates
+        # Store the DownloadInfo so we can display it in UI
+        self.precheck_in_progress[dl.url] = dl
+
+        # Queue for sequential precheck processing (fast, non-blocking)
+        await self.precheck_queue.put({
+            'dl': dl,
+            'auto_start': auto_start,
+            'dldirectory': dldirectory,
+            'output': output,
+            'output_chapter': output_chapter,
+            'ytdl_options': ytdl_options,
+            'entry': entry,
+        })
+        log.debug(f"[PreCheck] Queued for processing: {dl.url}")
+
+        # Notify immediately (fast response to user)
         await self.notifier.added(dl)
 
     async def __add_entry(self, entry, quality, format, folder, custom_name_prefix, playlist_strict_mode, playlist_item_limit, auto_start, already):
```
```diff
@@ -575,6 +879,34 @@ class DownloadQueue:
             else:
                 v.info.file_exists = False
 
-        return (list((k, v.info) for k, v in self.queue.items()) +
+        # Create list from items in precheck queue
+        # These items have 'preparing' status to indicate they're being analyzed
+        precheck_list = [(dl.url, dl) for dl in self.precheck_in_progress.values()]
+
+        return (precheck_list +
+                list((k, v.info) for k, v in self.queue.items()) +
                 list((k, v.info) for k, v in self.pending.items()),
                 list((k, v.info) for k, v in self.done.items()))
 
+    def _add_event(self, event_type, message, url=None):
+        """Add an event to the events list (keep only last 5)."""
+        event = {
+            'type': event_type,
+            'message': message,
+            'timestamp': int(time.time()),
+            'url': url
+        }
+        self.events.append(event)
+        # Keep only last 5 events
+        if len(self.events) > self.max_events:
+            self.events = self.events[-self.max_events:]
+        # Notify frontend via WebSocket
+        asyncio.create_task(self.notifier.event(event))
+
+    def get_events(self):
+        """Get all events (last 5)."""
+        return self.events
+
+    def clear_events(self):
+        """Clear all events."""
+        self.events = []
```
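Design note: the append-then-trim dance for the last five events could also be a collections.deque with maxlen, which discards old entries automatically (it would just need a list() conversion before JSON encoding). A sketch of the equivalent behavior:

```python
from collections import deque

events = deque(maxlen=5)  # drops the oldest entry automatically
for i in range(8):
    events.append({"type": "demo", "message": f"event {i}"})
print(list(events))  # only events 3..7 remain
```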
```diff
@@ -0,0 +1,15 @@
+# Netscape HTTP Cookie File
+# This file was generated by MeTube for x.com
+# Edit at your own risk.
+
+.x.com TRUE / FALSE 2147483647 guest_id_marketing v1%3A174926760709319082
+.x.com TRUE / FALSE 2147483647 guest_id_ads v1%3A174926760709319082
+.x.com TRUE / FALSE 2147483647 guest_id v1%3A174926760709319082
+.x.com TRUE / FALSE 2147483647 kdt 9iUxZjU74WWkK56ZFVmvbjiPFpXrygUNFX4csJcs
+.x.com TRUE / FALSE 2147483647 auth_token 10809341e7fe595d15fbc996e70980c22cb70f8c
+.x.com TRUE / FALSE 2147483647 ct0 06ce02804dc8a16caf6284037670b562f032772cba1564ffca127f2bba32cee26c8cd6e712a303a3c2aaec249b1d3a4f59ff6c18b14d820452bc6df413201ad0e3479931ea087440452fb28875c2940b
+.x.com TRUE / FALSE 2147483647 twid u%3D1822106795885711360
+.x.com TRUE / FALSE 2147483647 personalization_id "v1_zsnoqRdX3mu5xkn8FOf+ow=="
+.x.com TRUE / FALSE 2147483647 __cuid 1647a43fa2df49e78267470c106852f0
+.x.com TRUE / FALSE 2147483647 lang en
+.x.com TRUE / FALSE 2147483647 __cf_bm 5yMM.8sCR5gmerfK_rblxrbSNxP0FcGofwQN50bXbDs-1763900717.3479998-1.0.1.1-.2RheBoOtA7..8D40mwSGqMhp2PAbu_UdDkaGgW59aqx0L8Jhj9.1Vea_7pHH95V2JXITFm8l3mSx.RN1o6eCNS1MzDcyrwf_nXcIDQh_CMBaP.gkjYj6_zEu_8nc0S2
```
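The new file above is in Netscape cookie format (whitespace-separated fields: domain, include-subdomains flag, path, secure flag, expiry, name, value). yt-dlp can consume such a file through its cookiefile option; a minimal sketch, where the path and URL are assumptions for illustration:

```python
import yt_dlp

ydl = yt_dlp.YoutubeDL({
    "quiet": True,
    "cookiefile": "cookies/x.com.txt",  # assumed location of the committed file
})
# Probe metadata only; the cookies authenticate the request
info = ydl.extract_info("https://x.com/i/status/123", download=False)  # placeholder URL
print(info.get("title"))
```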
```diff
@@ -67,6 +67,41 @@
 </nav>
 
 <main role="main" class="container container-xl">
+  <!-- Events Display Area -->
+  <div *ngIf="events.length > 0" class="events-container mb-3">
+    <div class="events-header">
+      <div class="d-flex align-items-center justify-content-between">
+        <h6 class="mb-0">
+          <fa-icon [icon]="faClock" class="me-2"></fa-icon>
+          Recent Events
+        </h6>
+        <button type="button"
+                class="btn btn-sm btn-outline-secondary"
+                (click)="clearEvents()"
+                title="Clear all events">
+          <fa-icon [icon]="faTrashAlt" class="me-1"></fa-icon>
+          Clear
+        </button>
+      </div>
+    </div>
+    <div class="events-list">
+      <div *ngFor="let event of events" class="event-item" [ngClass]="'event-' + event.type">
+        <div class="event-icon">
+          <fa-icon [icon]="faTimesCircle"></fa-icon>
+        </div>
+        <div class="event-content">
+          <div class="event-message">{{ event.message }}</div>
+          <div class="event-url" *ngIf="event.url" [title]="event.url">
+            {{ event.url }}
+          </div>
+        </div>
+        <div class="event-time">
+          {{ getRelativeTime(event.timestamp) }}
+        </div>
+      </div>
+    </div>
+  </div>
+
 <form #f="ngForm">
   <div class="container add-url-box">
     <!-- Main URL Input with Download Button -->
```
```diff
@@ -236,3 +236,86 @@ main
 
   fa-icon
     vertical-align: middle
+
+// Events display area
+.events-container
+  background: var(--bs-body-bg)
+  border: 1px solid var(--bs-border-color)
+  border-radius: 0.5rem
+  overflow: hidden
+  box-shadow: 0 1px 3px rgba(0, 0, 0, 0.1)
+  margin-top: 1rem
+
+.events-header
+  padding: 0.75rem 1rem
+  background: var(--bs-secondary-bg)
+  border-bottom: 1px solid var(--bs-border-color)
+
+  h6
+    color: var(--bs-secondary-color)
+    font-weight: 600
+    display: flex
+    align-items: center
+
+    fa-icon
+      opacity: 0.7
+
+.events-list
+  max-height: 300px
+  overflow-y: auto
+
+.event-item
+  display: flex
+  align-items: flex-start
+  padding: 0.875rem 1rem
+  border-bottom: 1px solid var(--bs-border-color)
+  transition: background-color 0.2s
+
+  &:last-child
+    border-bottom: none
+
+  &:hover
+    background: var(--bs-tertiary-bg)
+
+  .event-icon
+    flex-shrink: 0
+    width: 32px
+    height: 32px
+    display: flex
+    align-items: center
+    justify-content: center
+    border-radius: 50%
+    margin-right: 0.75rem
+
+    fa-icon
+      font-size: 1.1rem
+
+  &.event-duplicate_skipped
+    .event-icon
+      background: rgba(255, 193, 7, 0.15)
+      color: #ffc107
+
+  .event-content
+    flex: 1
+    min-width: 0
+
+  .event-message
+    font-size: 0.9375rem
+    color: var(--bs-body-color)
+    margin-bottom: 0.25rem
+    font-weight: 500
+
+  .event-url
+    font-size: 0.8125rem
+    color: var(--bs-secondary-color)
+    white-space: nowrap
+    overflow: hidden
+    text-overflow: ellipsis
+    font-family: 'Courier New', monospace
+
+  .event-time
+    flex-shrink: 0
+    font-size: 0.75rem
+    color: var(--bs-secondary-color)
+    margin-left: 0.75rem
+    white-space: nowrap
```
```diff
@@ -1,4 +1,4 @@
-import { Component, ViewChild, ElementRef, AfterViewInit } from '@angular/core';
+import { Component, ViewChild, ElementRef, AfterViewInit, OnInit } from '@angular/core';
 import { HttpClient } from '@angular/common/http';
 import { faTrashAlt, faCheckCircle, faTimesCircle, IconDefinition } from '@fortawesome/free-regular-svg-icons';
 import { faRedoAlt, faSun, faMoon, faCircleHalfStroke, faCheck, faExternalLinkAlt, faDownload, faFileImport, faFileExport, faCopy, faClock, faTachometerAlt } from '@fortawesome/free-solid-svg-icons';
```
```diff
@@ -18,7 +18,7 @@ import {KeyValue} from "@angular/common";
   styleUrls: ['./app.component.sass'],
   standalone: false
 })
-export class AppComponent implements AfterViewInit {
+export class AppComponent implements OnInit, AfterViewInit {
   addUrl: string;
   formats: Format[] = Formats;
   qualities: Quality[];
```
```diff
@@ -78,6 +78,9 @@ export class AppComponent implements AfterViewInit {
   faClock = faClock;
   faTachometerAlt = faTachometerAlt;
 
+  // Events from backend (last 5 events)
+  events: Array<{type: string, message: string, timestamp: number, url: string}> = [];
+
   constructor(public downloads: DownloadsService, private cookieService: CookieService, private http: HttpClient) {
     this.format = cookieService.get('metube_format') || 'any';
     // Needs to be set or qualities won't automatically be set
```
```diff
@@ -98,6 +101,15 @@ export class AppComponent implements AfterViewInit {
     this.downloads.updated.subscribe(() => {
       this.updateMetrics();
     });
+    // Subscribe to events
+    this.downloads.eventReceived.subscribe((event: any) => {
+      console.debug('Event received in component:', event);
+      // Add to events array (keep last 5)
+      this.events.push(event);
+      if (this.events.length > 5) {
+        this.events = this.events.slice(-5);
+      }
+    });
   }
 
   ngOnInit() {
```
```diff
@@ -106,6 +118,9 @@ export class AppComponent implements AfterViewInit {
     this.customDirs$ = this.getMatchingCustomDir();
     this.setTheme(this.activeTheme);
 
+    // Load events from backend
+    this.loadEvents();
+
     window.matchMedia('(prefers-color-scheme: dark)').addEventListener('change', () => {
       if (this.activeTheme.id === 'auto') {
         this.setTheme(this.activeTheme);
```
```diff
@@ -269,11 +284,38 @@ export class AppComponent implements AfterViewInit {
         alert(`Error adding URL: ${status.msg}`);
       } else {
         this.addUrl = '';
+        // Reload events after adding
+        this.loadEvents();
       }
       this.addInProgress = false;
     });
   }
 
+  // Load events from backend
+  loadEvents() {
+    this.http.get('/events').subscribe((events: any[]) => {
+      this.events = events || [];
+    });
+  }
+
+  // Clear all events
+  clearEvents() {
+    this.http.post('/events/clear', {}).subscribe(() => {
+      this.events = [];
+    });
+  }
+
+  // Format timestamp to relative time
+  getRelativeTime(timestamp: number): string {
+    const now = Date.now() / 1000;
+    const diff = Math.floor(now - timestamp);
+
+    if (diff < 60) return 'just now';
+    if (diff < 3600) return `${Math.floor(diff / 60)}m ago`;
+    if (diff < 86400) return `${Math.floor(diff / 3600)}h ago`;
+    return `${Math.floor(diff / 86400)}d ago`;
+  }
+
   downloadItemByKey(id: string) {
     this.downloads.startById([id]).subscribe();
   }
```
```diff
@@ -44,6 +44,7 @@ export class DownloadsService {
   ytdlOptionsChanged = new Subject();
   configurationChanged = new Subject();
   updated = new Subject();
+  eventReceived = new Subject(); // New subject for events
 
   configuration = {};
   customDirs = {};
```
```diff
@@ -112,6 +113,11 @@ export class DownloadsService {
       let data = JSON.parse(strdata);
       this.ytdlOptionsChanged.next(data);
     });
+    socket.fromEvent('event').subscribe((strdata: string) => {
+      let event = JSON.parse(strdata);
+      console.debug('Received event:', event);
+      this.eventReceived.next(event);
+    });
   }
 
   handleHTTPError(error: HttpErrorResponse) {
```