feat: support sending and saving cookies

tigeren 2025-11-15 12:03:24 +00:00
parent c3fea47248
commit 95f855fd8a
6 changed files with 147 additions and 3 deletions

.env Normal file

@ -0,0 +1 @@
PYTHONPATH=./app


@ -1,2 +1,3 @@
docker push 192.168.2.212:3000/tigeren/metube:1.0
docker build -t 192.168.2.212:3000/tigeren/metube:1.0 .
docker push 192.168.2.212:3000/tigeren/metube:1.0


@ -14,6 +14,8 @@ import logging
import json
import pathlib
import re
import base64
from urllib.parse import urlparse
from watchfiles import DefaultFilter, Change, awatch
from ytdl import DownloadQueueNotifier, DownloadQueue
@ -30,7 +32,7 @@ class Config:
'CUSTOM_DIRS': 'true',
'CREATE_CUSTOM_DIRS': 'true',
'CUSTOM_DIRS_EXCLUDE_REGEX': r'(^|/)[.@].*$',
- 'DELETE_FILE_ON_TRASHCAN': 'false',
+ 'DELETE_FILE_ON_TRASHCAN': 'true',
'STATE_DIR': '.',
'URL_PREFIX': '',
'PUBLIC_HOST_URL': 'download/',
@ -238,6 +240,93 @@ async def add(request):
status = await dqueue.add(url, quality, format, folder, custom_name_prefix, playlist_strict_mode, playlist_item_limit, auto_start)
return web.Response(text=serializer.encode(status))
@routes.post(config.URL_PREFIX + 'cookie')
async def set_cookie(request):
    """Accept cookie string and save as cookie file for domain"""
    log.info("Received request to set cookie")
    post = await request.json()
    url = post.get('url')
    cookie = post.get('cookie')
    domain = post.get('domain')

    if not cookie:
        log.error("Bad request: missing 'cookie'")
        raise web.HTTPBadRequest()

    # Determine domain from either explicit domain field or URL
    if not domain:
        if url:
            parsed_url = urlparse(url)
            domain = parsed_url.netloc
        else:
            log.error("Bad request: missing both 'url' and 'domain'")
            raise web.HTTPBadRequest()

    log.info(f"Processing cookie for domain: {domain}")

    try:
        # Decode base64 cookie if it appears to be encoded
        try:
            # Check if cookie is base64 encoded
            decoded_cookie = base64.b64decode(cookie).decode('utf-8')
            log.info("Cookie was base64 encoded, decoded successfully")
            cookie = decoded_cookie
        except Exception as e:
            # If decoding fails, assume it's already plain text
            log.info(f"Cookie is not base64 encoded or decode failed ({e}), using as-is")

        log.debug(f"Cookie content: {cookie[:100]}...")  # Log first 100 chars

        # Create cookies directory if it doesn't exist
        cookies_dir = os.path.join(config.STATE_DIR, 'cookies')
        os.makedirs(cookies_dir, exist_ok=True)

        # Use domain as filename (sanitized)
        safe_domain = domain.replace(':', '_').replace('/', '_')
        cookie_file = os.path.join(cookies_dir, f'{safe_domain}.txt')
        log.info(f"Writing cookie file to: {cookie_file}")

        # Convert cookie string to Netscape cookie file format
        with open(cookie_file, 'w') as f:
            f.write('# Netscape HTTP Cookie File\n')
            f.write(f'# This file was generated by MeTube for {domain}\n')
            f.write('# Edit at your own risk.\n\n')

            # Parse cookie string (format: "key1=value1; key2=value2; ...")
            cookie_count = 0
            for cookie_pair in cookie.split(';'):
                cookie_pair = cookie_pair.strip()
                if '=' in cookie_pair:
                    key, value = cookie_pair.split('=', 1)
                    key = key.strip()
                    value = value.strip()
                    # Netscape format: domain\tflag\tpath\tsecure\texpiration\tname\tvalue
                    #   domain: .domain.com (with leading dot for all subdomains)
                    #   flag: TRUE (include subdomains)
                    #   path: / (all paths)
                    #   secure: FALSE (http and https)
                    #   expiration: 2147483647 (max 32-bit timestamp - Jan 2038)
                    #   name: cookie name
                    #   value: cookie value
                    f.write(f'.{domain}\tTRUE\t/\tFALSE\t2147483647\t{key}\t{value}\n')
                    cookie_count += 1
                    log.debug(f"Added cookie: {key}={value[:20]}...")

        log.info(f"Cookie file created successfully with {cookie_count} cookies at {cookie_file}")
        return web.Response(text=serializer.encode({
            'status': 'ok',
            'cookie_file': cookie_file,
            'cookie_count': cookie_count,
            'msg': f'Cookie saved successfully for {domain} ({cookie_count} cookies)'
        }))
    except Exception as e:
        log.error(f"Error saving cookie: {str(e)}", exc_info=True)
        return web.Response(text=serializer.encode({
            'status': 'error',
            'msg': f'Failed to save cookie: {str(e)}'
        }))
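As a reference for what this handler writes, a browser-style string like "SESSDATA=abc123; bili_jct=def456" saved for example.com would produce roughly the following cookies/example.com.txt (domain and values are illustrative, not taken from the commit):

# Netscape HTTP Cookie File
# This file was generated by MeTube for example.com
# Edit at your own risk.

.example.com	TRUE	/	FALSE	2147483647	SESSDATA	abc123
.example.com	TRUE	/	FALSE	2147483647	bili_jct	def456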
@routes.post(config.URL_PREFIX + 'delete')
async def delete(request):
    post = await request.json()
@ -368,7 +457,11 @@ except ValueError as e:
async def add_cors(request):
    return web.Response(text=serializer.encode({"status": "ok"}))

async def cookie_cors(request):
    return web.Response(text=serializer.encode({"status": "ok"}))

app.router.add_route('OPTIONS', config.URL_PREFIX + 'add', add_cors)
app.router.add_route('OPTIONS', config.URL_PREFIX + 'cookie', cookie_cors)

async def on_prepare(request, response):
    if 'Origin' in request.headers:
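To exercise the new endpoint end to end, a client POSTs JSON containing the cookie string (optionally base64-encoded) plus either a url or an explicit domain. A minimal sketch using only the Python standard library; the host, port, and empty URL_PREFIX are assumptions, not part of this commit:

import base64
import json
import urllib.request

# Example values; real cookies would be copied from the browser.
cookie_string = "SESSDATA=abc123; bili_jct=def456"
payload = {
    "url": "https://www.example.com/watch?v=xyz",                 # domain is derived from this URL
    "cookie": base64.b64encode(cookie_string.encode()).decode(),  # plain text is also accepted
}
req = urllib.request.Request(
    "http://localhost:8081/cookie",                               # assumed MeTube address
    data=json.dumps(payload).encode(),
    headers={"Content-Type": "application/json"},
    method="POST",
)
with urllib.request.urlopen(req) as resp:
    print(resp.read().decode())                                   # {"status": "ok", "cookie_file": ..., ...}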


@ -7,6 +7,7 @@ import asyncio
import multiprocessing
import logging
import re
from urllib.parse import urlparse
import yt_dlp.networking.impersonate
from dl_formats import get_format, get_opts, AUDIO_FORMATS
@ -347,6 +348,51 @@ class DownloadQueue:
if playlist_item_limit > 0:
    log.info(f'playlist limit is set. Processing only first {playlist_item_limit} entries')
    ytdl_options['playlistend'] = playlist_item_limit
# Check if cookie file exists for this domain
parsed_url = urlparse(dl.url)
domain = parsed_url.netloc
log.info(f"[Cookie] Checking for cookie file for domain: {domain}")

cookies_dir = os.path.join(self.config.STATE_DIR, 'cookies')
log.debug(f"[Cookie] Cookies directory: {cookies_dir}")

# Try domain-specific cookie file
safe_domain = domain.replace(':', '_').replace('/', '_')
cookie_file = os.path.join(cookies_dir, f'{safe_domain}.txt')
log.debug(f"[Cookie] Looking for cookie file at: {cookie_file}")

if os.path.exists(cookie_file):
    log.info(f"[Cookie] Found cookie file: {cookie_file}")
    # Verify file is readable and has content
    try:
        with open(cookie_file, 'r') as f:
            lines = f.readlines()
            cookie_lines = [l for l in lines if l.strip() and not l.startswith('#')]
            log.info(f"[Cookie] Cookie file contains {len(cookie_lines)} cookie entries")
            if len(cookie_lines) == 0:
                log.warning("[Cookie] Cookie file exists but contains no cookies!")
            else:
                log.debug(f"[Cookie] First cookie entry: {cookie_lines[0][:50]}...")
    except Exception as e:
        log.error(f"[Cookie] Error reading cookie file: {e}", exc_info=True)
    ytdl_options['cookiefile'] = cookie_file
    log.info(f"[Cookie] Configured yt-dlp to use cookiefile: {cookie_file}")
else:
    log.info(f"[Cookie] No cookie file found for domain {domain}")
    log.debug(f"[Cookie] Checked path: {cookie_file}")
    # List available cookie files for debugging
    if os.path.exists(cookies_dir):
        available_cookies = os.listdir(cookies_dir)
        if available_cookies:
            log.debug(f"[Cookie] Available cookie files: {available_cookies}")
        else:
            log.debug("[Cookie] Cookies directory is empty")
    else:
        log.debug("[Cookie] Cookies directory does not exist")
download = Download(dldirectory, self.config.TEMP_DIR, output, output_chapter, dl.quality, dl.format, ytdl_options, dl)
if auto_start is True:
    self.queue.put(download)
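The net effect of the lookup above is to hand yt-dlp a per-domain cookie file when one exists. A standalone sketch of the equivalent behavior; the example URL, STATE_DIR value, and download call are assumptions for illustration, only the cookiefile option comes from the commit:

import os
from urllib.parse import urlparse
import yt_dlp

url = "https://www.example.com/watch?v=xyz"
state_dir = "."                                          # corresponds to config.STATE_DIR
domain = urlparse(url).netloc
safe_domain = domain.replace(':', '_').replace('/', '_')
cookie_file = os.path.join(state_dir, 'cookies', f'{safe_domain}.txt')

ytdl_options = {}
if os.path.exists(cookie_file):
    ytdl_options['cookiefile'] = cookie_file             # yt-dlp reads Netscape-format cookie files

with yt_dlp.YoutubeDL(ytdl_options) as ydl:
    ydl.download([url])                                  # requests now carry the saved cookies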


@ -27,7 +27,7 @@ services:
      # Download behavior
      - DOWNLOAD_MODE=limited
      - MAX_CONCURRENT_DOWNLOADS=3
-     - DELETE_FILE_ON_TRASHCAN=false
+     - DELETE_FILE_ON_TRASHCAN=true
      # Custom directories
      - CUSTOM_DIRS=true

pyrightconfig.json Normal file

@ -0,0 +1,3 @@
{
}