Compare commits

No commits in common. "f6a9e6a1dd1bb60ad1799ee21a91985c02a2b303" and "588119d9eb89f71804ae2023ecdd6f64c2ffe2a2" have entirely different histories.

9 changed files with 14 additions and 317 deletions

.env

@@ -1 +0,0 @@
-PYTHONPATH=./app

(build script, filename not shown)

@@ -1,3 +0,0 @@
-docker build -t 192.168.2.212:3000/tigeren/metube:1.1 .
-docker push 192.168.2.212:3000/tigeren/metube:1.1

Dockerfile

@@ -26,8 +26,8 @@ RUN sed -i 's/\r$//g' docker-entrypoint.sh && \
 COPY app ./app
 COPY --from=builder /metube/dist/metube ./ui/dist/metube

-ENV UID=0
-ENV GID=0
+ENV UID=1000
+ENV GID=1000
 ENV UMASK=022

 ENV DOWNLOAD_DIR /downloads

app/main.py

@@ -14,8 +14,6 @@ import logging
 import json
 import pathlib
 import re
-import base64
-from urllib.parse import urlparse

 from watchfiles import DefaultFilter, Change, awatch
 from ytdl import DownloadQueueNotifier, DownloadQueue

@@ -32,7 +30,7 @@ class Config:
         'CUSTOM_DIRS': 'true',
         'CREATE_CUSTOM_DIRS': 'true',
         'CUSTOM_DIRS_EXCLUDE_REGEX': r'(^|/)[.@].*$',
-        'DELETE_FILE_ON_TRASHCAN': 'true',
+        'DELETE_FILE_ON_TRASHCAN': 'false',
         'STATE_DIR': '.',
         'URL_PREFIX': '',
         'PUBLIC_HOST_URL': 'download/',
@@ -117,17 +115,9 @@ config = Config()

 class ObjectSerializer(json.JSONEncoder):
     def default(self, obj):
-        # First try to use __dict__ for custom objects
-        if hasattr(obj, '__dict__'):
+        if isinstance(obj, object):
             return obj.__dict__
-        # Convert iterables (generators, dict_items, etc.) to lists
-        # Exclude strings and bytes which are also iterable
-        elif hasattr(obj, '__iter__') and not isinstance(obj, (str, bytes)):
-            try:
-                return list(obj)
-            except:
-                pass
-        # Fall back to default behavior
+        else:
             return json.JSONEncoder.default(self, obj)

 serializer = ObjectSerializer()
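Note on the hunk above: `isinstance(obj, object)` is always true in Python, so the surviving `else` branch is unreachable, and any object without a `__dict__` (generators, `dict_items`, and the other iterables the deleted branch handled) now raises `AttributeError`. A standalone sketch of the difference, not taken from either branch:

    import json

    class NewStyleSerializer(json.JSONEncoder):
        def default(self, obj):
            if isinstance(obj, object):  # always True, so the fallback below is dead code
                return obj.__dict__      # AttributeError for objects lacking __dict__
            return json.JSONEncoder.default(self, obj)

    gen = (x for x in range(3))
    # json.dumps(gen, cls=NewStyleSerializer)  # raises AttributeError
    # The deleted hasattr/__iter__ version would have serialized this as [0, 1, 2].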
@@ -240,93 +230,6 @@ async def add(request):
     status = await dqueue.add(url, quality, format, folder, custom_name_prefix, playlist_strict_mode, playlist_item_limit, auto_start)
     return web.Response(text=serializer.encode(status))
-@routes.post(config.URL_PREFIX + 'cookie')
-async def set_cookie(request):
-    """Accept cookie string and save as cookie file for domain"""
-    log.info("Received request to set cookie")
-    post = await request.json()
-    url = post.get('url')
-    cookie = post.get('cookie')
-    domain = post.get('domain')
-
-    if not cookie:
-        log.error("Bad request: missing 'cookie'")
-        raise web.HTTPBadRequest()
-
-    # Determine domain from either explicit domain field or URL
-    if not domain:
-        if url:
-            parsed_url = urlparse(url)
-            domain = parsed_url.netloc
-        else:
-            log.error("Bad request: missing both 'url' and 'domain'")
-            raise web.HTTPBadRequest()
-
-    log.info(f"Processing cookie for domain: {domain}")
-
-    try:
-        # Decode base64 cookie if it appears to be encoded
-        try:
-            # Check if cookie is base64 encoded
-            decoded_cookie = base64.b64decode(cookie).decode('utf-8')
-            log.info(f"Cookie was base64 encoded, decoded successfully")
-            cookie = decoded_cookie
-        except Exception as e:
-            # If decoding fails, assume it's already plain text
-            log.info(f"Cookie is not base64 encoded or decode failed ({e}), using as-is")
-
-        log.debug(f"Cookie content: {cookie[:100]}...")  # Log first 100 chars
-
-        # Create cookies directory if it doesn't exist
-        cookies_dir = os.path.join(config.STATE_DIR, 'cookies')
-        os.makedirs(cookies_dir, exist_ok=True)
-
-        # Use domain as filename (sanitized)
-        safe_domain = domain.replace(':', '_').replace('/', '_')
-        cookie_file = os.path.join(cookies_dir, f'{safe_domain}.txt')
-        log.info(f"Writing cookie file to: {cookie_file}")
-
-        # Convert cookie string to Netscape cookie file format
-        with open(cookie_file, 'w') as f:
-            f.write('# Netscape HTTP Cookie File\n')
-            f.write(f'# This file was generated by MeTube for {domain}\n')
-            f.write('# Edit at your own risk.\n\n')
-
-            # Parse cookie string (format: "key1=value1; key2=value2; ...")
-            cookie_count = 0
-            for cookie_pair in cookie.split(';'):
-                cookie_pair = cookie_pair.strip()
-                if '=' in cookie_pair:
-                    key, value = cookie_pair.split('=', 1)
-                    key = key.strip()
-                    value = value.strip()
-                    # Netscape format: domain\tflag\tpath\tsecure\texpiration\tname\tvalue
-                    # domain: .domain.com (with leading dot for all subdomains)
-                    # flag: TRUE (include subdomains)
-                    # path: / (all paths)
-                    # secure: FALSE (http and https)
-                    # expiration: 2147483647 (max 32-bit timestamp - Jan 2038)
-                    # name: cookie name
-                    # value: cookie value
-                    f.write(f'.{domain}\tTRUE\t/\tFALSE\t2147483647\t{key}\t{value}\n')
-                    cookie_count += 1
-                    log.debug(f"Added cookie: {key}={value[:20]}...")
-
-        log.info(f"Cookie file created successfully with {cookie_count} cookies at {cookie_file}")
-        return web.Response(text=serializer.encode({
-            'status': 'ok',
-            'cookie_file': cookie_file,
-            'cookie_count': cookie_count,
-            'msg': f'Cookie saved successfully for {domain} ({cookie_count} cookies)'
-        }))
-    except Exception as e:
-        log.error(f"Error saving cookie: {str(e)}", exc_info=True)
-        return web.Response(text=serializer.encode({
-            'status': 'error',
-            'msg': f'Failed to save cookie: {str(e)}'
-        }))
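For reference, a file written by the removed handler would look like this (domain and cookie values are made-up placeholders; the fields are tab-separated, exactly as the f.write calls above produce):

    # Netscape HTTP Cookie File
    # This file was generated by MeTube for example.com
    # Edit at your own risk.

    .example.com	TRUE	/	FALSE	2147483647	session_id	abc123
    .example.com	TRUE	/	FALSE	2147483647	csrf_token	xyz789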

 @routes.post(config.URL_PREFIX + 'delete')
 async def delete(request):
     post = await request.json()
@@ -457,11 +360,7 @@ except ValueError as e:
 async def add_cors(request):
     return web.Response(text=serializer.encode({"status": "ok"}))

-async def cookie_cors(request):
-    return web.Response(text=serializer.encode({"status": "ok"}))
-
 app.router.add_route('OPTIONS', config.URL_PREFIX + 'add', add_cors)
-app.router.add_route('OPTIONS', config.URL_PREFIX + 'cookie', cookie_cors)

 async def on_prepare(request, response):
     if 'Origin' in request.headers:

app/ytdl.py

@@ -7,7 +7,6 @@ import asyncio
 import multiprocessing
 import logging
 import re
-from urllib.parse import urlparse

 import yt_dlp.networking.impersonate
 from dl_formats import get_format, get_opts, AUDIO_FORMATS
@@ -348,51 +347,6 @@ class DownloadQueue:
         if playlist_item_limit > 0:
             log.info(f'playlist limit is set. Processing only first {playlist_item_limit} entries')
             ytdl_options['playlistend'] = playlist_item_limit
-
-        # Check if cookie file exists for this domain
-        parsed_url = urlparse(dl.url)
-        domain = parsed_url.netloc
-        log.info(f"[Cookie] Checking for cookie file for domain: {domain}")
-
-        cookies_dir = os.path.join(self.config.STATE_DIR, 'cookies')
-        log.debug(f"[Cookie] Cookies directory: {cookies_dir}")
-
-        # Try domain-specific cookie file
-        safe_domain = domain.replace(':', '_').replace('/', '_')
-        cookie_file = os.path.join(cookies_dir, f'{safe_domain}.txt')
-        log.debug(f"[Cookie] Looking for cookie file at: {cookie_file}")
-
-        if os.path.exists(cookie_file):
-            log.info(f"[Cookie] Found cookie file: {cookie_file}")
-            # Verify file is readable and has content
-            try:
-                with open(cookie_file, 'r') as f:
-                    lines = f.readlines()
-                    cookie_lines = [l for l in lines if l.strip() and not l.startswith('#')]
-                    log.info(f"[Cookie] Cookie file contains {len(cookie_lines)} cookie entries")
-                    if len(cookie_lines) == 0:
-                        log.warning(f"[Cookie] Cookie file exists but contains no cookies!")
-                    else:
-                        log.debug(f"[Cookie] First cookie entry: {cookie_lines[0][:50]}...")
-            except Exception as e:
-                log.error(f"[Cookie] Error reading cookie file: {e}", exc_info=True)
-            ytdl_options['cookiefile'] = cookie_file
-            log.info(f"[Cookie] Configured yt-dlp to use cookiefile: {cookie_file}")
-        else:
-            log.info(f"[Cookie] No cookie file found for domain {domain}")
-            log.debug(f"[Cookie] Checked path: {cookie_file}")
-            # List available cookie files for debugging
-            if os.path.exists(cookies_dir):
-                available_cookies = os.listdir(cookies_dir)
-                if available_cookies:
-                    log.debug(f"[Cookie] Available cookie files: {available_cookies}")
-                else:
-                    log.debug(f"[Cookie] Cookies directory is empty")
-            else:
-                log.debug(f"[Cookie] Cookies directory does not exist")
-
         download = Download(dldirectory, self.config.TEMP_DIR, output, output_chapter, dl.quality, dl.format, ytdl_options, dl)
         if auto_start is True:
             self.queue.put(download)
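With this per-domain lookup removed, cookie support reverts to yt-dlp's standard `cookiefile` option, which MeTube can still receive globally through its `YTDL_OPTIONS` environment variable (the same mechanism left commented out in the docker-compose.yml below), assuming a Netscape-format cookies file is mounted into the container:

    YTDL_OPTIONS={"cookiefile":"/cookies/cookies.txt"}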
@@ -422,21 +376,14 @@ class DownloadQueue:
             log.debug('Processing as a playlist')
             entries = entry['entries']
             log.info(f'playlist detected with {len(entries)} entries')
             playlist_index_digits = len(str(len(entries)))
             results = []
             if playlist_item_limit > 0:
                 log.info(f'Playlist item limit is set. Processing only first {playlist_item_limit} entries')
                 entries = entries[:playlist_item_limit]
-
-            # Verify playlist entry has 'id' before using it
-            playlist_id = entry.get("id", "unknown_playlist")
-            if "id" not in entry:
-                log.warning(f"Playlist entry missing 'id' field. Using fallback 'unknown_playlist'. Entry keys: {list(entry.keys())}")
-
             for index, etr in enumerate(entries, start=1):
                 etr["_type"] = "video"
-                etr["playlist"] = playlist_id
+                etr["playlist"] = entry["id"]
                 etr["playlist_index"] = '{{0:0{0:d}d}}'.format(playlist_index_digits).format(index)
                 for property in ("id", "title", "uploader", "uploader_id"):
                     if property in entry:
@@ -447,32 +394,9 @@ class DownloadQueue:
             return {'status': 'ok'}
         elif etype == 'video' or (etype.startswith('url') and 'id' in entry and 'title' in entry):
             log.debug('Processing as a video')
-
-            # Extract ID from entry, or derive from URL if missing
-            video_id = entry.get('id')
-            if not video_id:
-                # Try to extract ID from URL (e.g., viewkey parameter or URL path)
-                video_url = entry.get('url', '')
-                if 'viewkey=' in video_url:
-                    # Extract viewkey parameter (common in PornHub, etc.)
-                    match = re.search(r'viewkey=([^&]+)', video_url)
-                    if match:
-                        video_id = match.group(1)
-                        log.info(f"Extracted video ID from viewkey: {video_id}")
-                elif 'webpage_url' in entry:
-                    # Use webpage_url as fallback
-                    video_id = entry['webpage_url']
-                else:
-                    # Last resort: use the URL itself
-                    video_id = video_url
-
-            if not video_id:
-                log.error(f"Video entry missing 'id' field and could not extract from URL. Entry keys: {list(entry.keys())}")
-                return {'status': 'error', 'msg': "Video entry missing required 'id' field and URL extraction failed"}
-
             key = entry.get('webpage_url') or entry['url']
             if not self.queue.exists(key):
-                dl = DownloadInfo(video_id, entry.get('title') or video_id, key, quality, format, folder, custom_name_prefix, error, entry, playlist_item_limit)
+                dl = DownloadInfo(entry['id'], entry.get('title') or entry['id'], key, quality, format, folder, custom_name_prefix, error, entry, playlist_item_limit)
                 await self.__add_download(dl, auto_start)
             return {'status': 'ok'}
         return {'status': 'error', 'msg': f'Unsupported resource "{etype}"'}
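The deleted fallback tried to salvage an ID from the URL's `viewkey` query parameter whenever the extractor returned no `id`; roughly (the URL is a made-up placeholder):

    import re

    url = 'https://example.com/view_video.php?viewkey=abc123def'  # hypothetical
    m = re.search(r'viewkey=([^&]+)', url)
    video_id = m.group(1) if m else None  # -> 'abc123def'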

docker-compose.yml

@@ -1,104 +0,0 @@
-version: '3.8'
-
-services:
-  metube:
-    build: .
-    image: metube:latest
-    container_name: metube
-    restart: unless-stopped
-    ports:
-      - "8081:8081"
-    volumes:
-      - ./downloads:/downloads
-      - ./metube-config:/config
-      # Optional: mount cookies file for authenticated downloads
-      # - ./cookies:/cookies:ro
-    environment:
-      # Basic configuration
-      - UID=0
-      - GID=0
-      - UMASK=022
-      # Download directories
-      - DOWNLOAD_DIR=/downloads
-      - STATE_DIR=/config
-      - TEMP_DIR=/downloads
-      # Download behavior
-      - DOWNLOAD_MODE=limited
-      - MAX_CONCURRENT_DOWNLOADS=3
-      - DELETE_FILE_ON_TRASHCAN=true
-      # Custom directories
-      - CUSTOM_DIRS=true
-      - CREATE_CUSTOM_DIRS=true
-      - CUSTOM_DIRS_EXCLUDE_REGEX=(^|/)[.@].*$
-      - DOWNLOAD_DIRS_INDEXABLE=false
-      # File naming
-      - OUTPUT_TEMPLATE=%(title)s.%(ext)s
-      - OUTPUT_TEMPLATE_CHAPTER=%(title)s - %(section_number)s %(section_title)s.%(ext)s
-      - OUTPUT_TEMPLATE_PLAYLIST=%(playlist_title)s/%(title)s.%(ext)s
-      # Playlist options
-      - DEFAULT_OPTION_PLAYLIST_STRICT_MODE=false
-      - DEFAULT_OPTION_PLAYLIST_ITEM_LIMIT=0
-      # Web server
-      - URL_PREFIX=
-      - PUBLIC_HOST_URL=download/
-      - PUBLIC_HOST_AUDIO_URL=audio_download/
-      - HOST=0.0.0.0
-      - PORT=8081
-      # Logging
-      - LOGLEVEL=INFO
-      - ENABLE_ACCESSLOG=false
-      # Theme
-      - DEFAULT_THEME=auto
-      # Optional: yt-dlp options
-      # - YTDL_OPTIONS={}
-      # - YTDL_OPTIONS_FILE=/path/to/ytdl_options.json
-      # Optional: cookies for authenticated downloads
-      # - YTDL_OPTIONS={"cookiefile":"/cookies/cookies.txt"}
-      # Optional: HTTPS configuration
-      # - HTTPS=true
-      # - CERTFILE=/ssl/cert.pem
-      # - KEYFILE=/ssl/key.pem
-      # Optional: robots.txt
-      # - ROBOTS_TXT=/app/robots.txt
-    # Optional: health check
-    healthcheck:
-      test: ["CMD", "curl", "-f", "http://localhost:8081/version"]
-      interval: 30s
-      timeout: 10s
-      retries: 3
-      start_period: 40s
-    # Optional: resource limits
-    # deploy:
-    #   resources:
-    #     limits:
-    #       cpus: '2.0'
-    #       memory: 2G
-    #     reservations:
-    #       cpus: '0.5'
-    #       memory: 512M
-
-# Optional: networks configuration
-# networks:
-#   default:
-#     name: metube-network
-
-# Optional: named volumes
-# volumes:
-#   metube-downloads:
-#     driver: local
-#   metube-cookies:
-#     driver: local
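For context, the removed compose file was self-contained; a typical invocation (assuming Docker Compose v2 and the repository root as the working directory) would have been:

    docker compose up -d --build
    curl -f http://localhost:8081/version   # same endpoint the healthcheck probes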

docker-entrypoint.sh

@@ -7,8 +7,10 @@ mkdir -p "${DOWNLOAD_DIR}" "${STATE_DIR}" "${TEMP_DIR}"

 if [ `id -u` -eq 0 ] && [ `id -g` -eq 0 ]; then
     if [ "${UID}" -eq 0 ]; then
-        echo "Running as root user (UID=0, GID=0) - this is now the default configuration"
+        echo "Warning: it is not recommended to run as root user, please check your setting of the UID environment variable"
     fi
+    echo "Changing ownership of download and state directories to ${UID}:${GID}"
+    chown -R "${UID}":"${GID}" /app "${DOWNLOAD_DIR}" "${STATE_DIR}" "${TEMP_DIR}"
     echo "Running MeTube as user ${UID}:${GID}"
     exec su-exec "${UID}":"${GID}" python3 app/main.py
 else
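The restored chown/su-exec path means a non-root run only needs the UID/GID environment variables; a hypothetical invocation using the image name from the removed compose file:

    docker run -d -p 8081:8081 -e UID=1000 -e GID=1000 \
        -v "$(pwd)/downloads:/downloads" metube:latest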

pyrightconfig.json

@ -1,20 +0,0 @@
{
"venvPath": ".",
"venv": ".venv",
"pythonVersion": "3.13",
"include": ["app"],
"executionEnvironments": [
{
"root": ".",
"pythonVersion": "3.13",
"extraPaths": [".", "app"]
}
],
"typeCheckingMode": "basic",
"reportMissingImports": "warning",
"reportOptionalMemberAccess": "warning",
"reportOptionalContextManager": "warning",
"reportAttributeAccessIssue": "warning",
"reportArgumentType": "warning",
"reportCallIssue": "warning"
}

uv.lock

@@ -744,11 +744,11 @@ wheels = [

 [[package]]
 name = "yt-dlp"
-version = "2025.10.22"
+version = "2025.9.26"
 source = { registry = "https://pypi.org/simple" }
-sdist = { url = "https://files.pythonhosted.org/packages/08/70/cf4bd6c837ab0a709040888caa70d166aa2dfbb5018d1d5c983bf0b50254/yt_dlp-2025.10.22.tar.gz", hash = "sha256:db2d48133222b1d9508c6de757859c24b5cefb9568cf68ccad85dac20b07f77b", size = 3046863, upload-time = "2025-10-22T19:53:19.301Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/58/8f/0daea0feec1ab85e7df85b98ec7cc8c85d706362e80efc5375c7007dc3dc/yt_dlp-2025.9.26.tar.gz", hash = "sha256:c148ae8233ac4ce6c5fbf6f70fcc390f13a00f59da3776d373cf88c5370bda86", size = 3037475, upload-time = "2025-09-26T22:23:42.882Z" }
 wheels = [
-    { url = "https://files.pythonhosted.org/packages/cc/2a/fd184bf97d570841aa86b4aeb84aee93e7957a34059dafd4982157c10bff/yt_dlp-2025.10.22-py3-none-any.whl", hash = "sha256:9c803a9598859f91d0d5bd3337f1506ecb40bbe97f6efbe93bc4461fed344fb2", size = 3248983, upload-time = "2025-10-22T19:53:16.483Z" },
+    { url = "https://files.pythonhosted.org/packages/35/94/18210c5e6a9d7e622a3b3f4a73dde205f7adf0c46b42b27d0da8c6e5c872/yt_dlp-2025.9.26-py3-none-any.whl", hash = "sha256:36f5fbc153600f759abd48d257231f0e0a547a115ac7ffb05d5b64e5c7fdf8a2", size = 3241906, upload-time = "2025-09-26T22:23:39.976Z" },
 ]

 [package.optional-dependencies]
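This hunk moves the yt-dlp pin from 2025.10.22 back to 2025.9.26. Since this is a uv lockfile, re-pinning would normally go through uv rather than hand-editing, along the lines of:

    uv lock --upgrade-package 'yt-dlp==2025.9.26'
    uv sync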