Загрузить файлы в «/» (Upload files to "/")
This commit is contained in:
458
YTProcessing.py
458
YTProcessing.py
@@ -2,10 +2,10 @@ import yt_dlp
|
||||
import aiohttp
|
||||
import asyncio
|
||||
from _config import *
|
||||
from cachetools import TTLCache
|
||||
import zipfile
|
||||
import io
|
||||
|
||||
|
||||
class YTVideoInfo:
|
||||
def __init__(self, link: str):
|
||||
self.link = link
|
||||
@@ -56,11 +56,44 @@ class YTVideoInfo:
|
||||
if key in seen:
|
||||
continue
|
||||
seen.add(key)
|
||||
size = f.get('filesize') or f.get('filesize_approx')
|
||||
size_str = None
|
||||
if size:
|
||||
if size >= 1_073_741_824:
|
||||
size_str = f"{size / 1_073_741_824:.1f} GB"
|
||||
elif size >= 1_048_576:
|
||||
size_str = f"{size / 1_048_576:.1f} MB"
|
||||
else:
|
||||
size_str = f"{size / 1024:.0f} KB"
|
||||
|
||||
audio_size = 0
|
||||
for af in info['formats']:
|
||||
if (af.get('vcodec') == 'none'
|
||||
and af.get('acodec') != 'none'
|
||||
and af.get('ext') == 'm4a'):
|
||||
audio_size = af.get('filesize') or af.get('filesize_approx') or 0
|
||||
break
|
||||
|
||||
total_size = (size or 0) + audio_size
|
||||
|
||||
label = f"{height}p"
|
||||
if fps and fps > 30:
|
||||
label += f" {int(fps)}fps"
|
||||
label += f" ({ext.upper()})"
|
||||
formats.append({'id': f['format_id'], 'label': label, 'height': height})
|
||||
if size_str:
|
||||
label += f" ~ {size_str}"
|
||||
|
||||
formats.append({
|
||||
'id': f['format_id'],
|
||||
'label': label,
|
||||
'height': height,
|
||||
'ext': ext,
|
||||
'fps': fps,
|
||||
'vcodec': f.get('vcodec', ''),
|
||||
'acodec': f.get('acodec', ''),
|
||||
'filesize': total_size or None,
|
||||
})
|
||||
|
||||
return sorted(formats, key=lambda x: x['height'], reverse=True)
|
||||
|
||||
def get_subtitles(self, info: dict) -> list[dict]:
|
||||
@@ -126,7 +159,17 @@ class YTVideoStream:
|
||||
loop = asyncio.get_event_loop()
|
||||
return await loop.run_in_executor(executor, self._get_urls_sync)
|
||||
|
||||
async def generate(self):
|
||||
async def generate(
|
||||
self,
|
||||
prebuffer: bool = True,
|
||||
stop_event: asyncio.Event | None = None,
|
||||
):
|
||||
"""
|
||||
Yields MP4 chunks.
|
||||
stop_event — when set, cleanly terminates FFmpeg and stops yielding.
|
||||
This allows the server to cancel an in-progress stream when the
|
||||
client reconnects with a new format_id.
|
||||
"""
|
||||
video_url, audio_url = await self._get_urls()
|
||||
|
||||
if audio_url:
|
||||
@@ -146,110 +189,61 @@ class YTVideoStream:
|
||||
stderr=asyncio.subprocess.DEVNULL,
|
||||
)
|
||||
|
||||
pre_buffer, buffered_mb = [], 0
|
||||
try:
|
||||
while buffered_mb < PRE_BUFFER_MB:
|
||||
chunk = await process.stdout.read(CHUNK_SIZE)
|
||||
if not chunk:
|
||||
for c in pre_buffer:
|
||||
yield c
|
||||
if prebuffer:
|
||||
pre_buffer, buffered_mb = [], 0
|
||||
while buffered_mb < PRE_BUFFER_MB:
|
||||
if stop_event and stop_event.is_set():
|
||||
return
|
||||
try:
|
||||
chunk = await asyncio.wait_for(
|
||||
process.stdout.read(CHUNK_SIZE),
|
||||
timeout=15.0
|
||||
)
|
||||
except asyncio.TimeoutError:
|
||||
yield b''
|
||||
continue
|
||||
if not chunk:
|
||||
for c in pre_buffer: yield c
|
||||
return
|
||||
pre_buffer.append(chunk)
|
||||
buffered_mb += len(chunk) / (1024 * 1024)
|
||||
for c in pre_buffer:
|
||||
yield c
|
||||
else:
|
||||
first = True
|
||||
while first:
|
||||
if stop_event and stop_event.is_set():
|
||||
return
|
||||
try:
|
||||
chunk = await asyncio.wait_for(
|
||||
process.stdout.read(CHUNK_SIZE),
|
||||
timeout=15.0
|
||||
)
|
||||
if chunk:
|
||||
yield chunk
|
||||
first = False
|
||||
else:
|
||||
return
|
||||
except asyncio.TimeoutError:
|
||||
yield b''
|
||||
continue
|
||||
|
||||
while True:
|
||||
if stop_event and stop_event.is_set():
|
||||
return
|
||||
pre_buffer.append(chunk)
|
||||
buffered_mb += len(chunk) / (1024 * 1024)
|
||||
|
||||
for c in pre_buffer:
|
||||
yield c
|
||||
|
||||
while True:
|
||||
chunk = await process.stdout.read(CHUNK_SIZE)
|
||||
try:
|
||||
chunk = await asyncio.wait_for(
|
||||
process.stdout.read(CHUNK_SIZE),
|
||||
timeout=30.0
|
||||
)
|
||||
except asyncio.TimeoutError:
|
||||
yield b''
|
||||
continue
|
||||
if not chunk:
|
||||
break
|
||||
yield chunk
|
||||
finally:
|
||||
try:
|
||||
process.kill()
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
else:
|
||||
async with aiohttp.ClientSession() as session:
|
||||
async with session.get(video_url) as r:
|
||||
async for chunk in r.content.iter_chunked(CHUNK_SIZE):
|
||||
yield chunk
|
||||
|
||||
|
||||
class YTVideoDownloader:
|
||||
def __init__(self, link: str, format_id: str = 'best'):
    """Remember the target video link and the requested yt-dlp format id."""
    self.link, self.format_id = link, format_id
|
||||
|
||||
def _get_urls_sync(self) -> tuple[str, str | None, str]:
    """Resolve direct media URLs for the link (blocking).

    Returns ``(video_url, audio_url_or_None, title)``. When yt-dlp picks a
    separate video+audio pair, both URLs are returned; otherwise the single
    progressive URL is returned with ``None`` for audio.
    """
    # Preference chain: exact requested format, then best mp4+m4a pair,
    # then any mp4, then whatever is available.
    selector = (
        f'{self.format_id}[ext=mp4]+bestaudio[ext=m4a]'
        '/bestvideo[ext=mp4]+bestaudio[ext=m4a]'
        '/best[ext=mp4]/best'
    )
    browser_ua = (
        'Mozilla/5.0 (Windows NT 10.0; Win64; x64) '
        'AppleWebKit/537.36 (KHTML, like Gecko) '
        'Chrome/120.0.0.0 Safari/537.36'
    )
    opts = {
        'format': selector,
        'quiet': True,
        'no_warnings': True,
        'socket_timeout': 30,
        'http_headers': {'User-Agent': browser_ua},
    }

    with yt_dlp.YoutubeDL(opts) as ydl:
        info = ydl.extract_info(self.link, download=False)

    title = info.get('title', 'video')

    # A merged selection exposes the two component streams here.
    if 'requested_formats' in info and len(info['requested_formats']) == 2:
        video_fmt, audio_fmt = info['requested_formats']
        return video_fmt['url'], audio_fmt['url'], title

    return info['url'], None, title
|
||||
|
||||
async def get_urls(self) -> tuple[str, str | None, str]:
    """Resolve media URLs without blocking the event loop.

    Offloads the blocking ``_get_urls_sync`` to the shared ``executor``
    (imported from ``_config``).

    Returns:
        ``(video_url, audio_url_or_None, title)``.
    """
    # get_running_loop() is the correct accessor inside a coroutine;
    # get_event_loop() is deprecated here and may create a stray loop.
    loop = asyncio.get_running_loop()
    return await loop.run_in_executor(executor, self._get_urls_sync)
|
||||
|
||||
async def generate(self, progress_callback=None):
|
||||
video_url, audio_url, _ = await self.get_urls()
|
||||
|
||||
if audio_url:
|
||||
process = await asyncio.create_subprocess_exec(
|
||||
'ffmpeg',
|
||||
'-i', video_url,
|
||||
'-i', audio_url,
|
||||
'-c:v', 'copy',
|
||||
'-c:a', 'aac',
|
||||
'-b:a', '192k',
|
||||
'-g', '60',
|
||||
'-f', 'mp4',
|
||||
'-movflags', 'frag_keyframe+empty_moov+faststart',
|
||||
'-frag_duration', '2000000',
|
||||
'pipe:1',
|
||||
stdout=asyncio.subprocess.PIPE,
|
||||
stderr=asyncio.subprocess.DEVNULL,
|
||||
)
|
||||
|
||||
bytes_sent = 0
|
||||
try:
|
||||
while True:
|
||||
chunk = await process.stdout.read(CHUNK_SIZE)
|
||||
if not chunk:
|
||||
break
|
||||
bytes_sent += len(chunk)
|
||||
|
||||
if progress_callback:
|
||||
progress_callback(bytes_sent)
|
||||
|
||||
yield chunk
|
||||
finally:
|
||||
try:
|
||||
process.kill()
|
||||
@@ -260,22 +254,118 @@ class YTVideoDownloader:
|
||||
else:
|
||||
async with aiohttp.ClientSession() as session:
|
||||
async with session.get(video_url) as r:
|
||||
bytes_sent = 0
|
||||
async for chunk in r.content.iter_chunked(CHUNK_SIZE):
|
||||
bytes_sent += len(chunk)
|
||||
if progress_callback:
|
||||
progress_callback(bytes_sent)
|
||||
if stop_event and stop_event.is_set():
|
||||
return
|
||||
yield chunk
|
||||
|
||||
|
||||
class YTPlaylist:
|
||||
"""Handles playlist metadata extraction and streaming/downloading."""
|
||||
class YTVideoDownloader:
|
||||
def __init__(self, link: str, format_id: str = 'best'):
    """Keep the video link and the desired yt-dlp format selector."""
    self.link = link
    self.format_id = format_id
|
||||
|
||||
def _download_sync(self, progress_callback=None) -> tuple[str, str]:
    """Download the video and remux it to MP4 on disk (blocking).

    Args:
        progress_callback: optional callable taking a float percentage
            (capped at 99.0 until the file is fully written).

    Returns:
        ``(filepath, tmp_dir)`` — the merged MP4 and the temp directory
        that the caller is responsible for removing.

    Raises:
        RuntimeError: if yt-dlp fails (temp dir is cleaned up first).
        FileNotFoundError: if no MP4 appears after the download.
    """
    tmp_dir = tempfile.mkdtemp()
    out_tmpl = os.path.join(tmp_dir, 'video.%(ext)s')

    ydl_opts = {
        'format': (
            f'{self.format_id}[ext=mp4]+bestaudio[ext=m4a]'
            f'/bestvideo[ext=mp4]+bestaudio[ext=m4a]'
            f'/best[ext=mp4]/best'
        ),
        'outtmpl': out_tmpl,
        'merge_output_format': 'mp4',
        'quiet': True,
        'no_warnings': True,
        # Parallel fragment fetches speed up DASH/HLS downloads.
        'concurrent_fragment_downloads': 4,
        'retries': 5,
        'fragment_retries': 5,
        'socket_timeout': 30,
        'postprocessors': [
            {
                # NB: 'preferedformat' (sic) is yt-dlp's actual option name.
                'key': 'FFmpegVideoRemuxer',
                'preferedformat': 'mp4',
            },
        ],
        'postprocessor_args': {
            'ffmpeg': [
                '-c:v', 'copy',
                '-c:a', 'aac',
                '-b:a', '192k',
                '-movflags', '+faststart',
            ]
        },
    }

    if progress_callback:
        def hook(d):
            # yt-dlp progress hook: report a capped percentage so the UI
            # never shows 100% before the remux step finishes.
            if d['status'] == 'downloading':
                raw = d.get('_percent_str', '0%').strip().replace('%', '')
                try:
                    pct = min(float(raw), 99.0)
                    progress_callback(pct)
                except Exception:
                    # best-effort: never let a progress glitch kill the download
                    pass
            elif d['status'] == 'finished':
                progress_callback(99)

        ydl_opts['progress_hooks'] = [hook]

    try:
        with yt_dlp.YoutubeDL(ydl_opts) as ydl:
            ydl.download([self.link])
    except Exception as e:
        shutil.rmtree(tmp_dir, ignore_errors=True)
        # Chain the cause so the original yt-dlp traceback is preserved.
        raise RuntimeError(f'yt-dlp failed: {e}') from e

    mp4_files = [f for f in os.listdir(tmp_dir) if f.endswith('.mp4')]
    if not mp4_files:
        shutil.rmtree(tmp_dir, ignore_errors=True)
        raise FileNotFoundError('No MP4 found after download.')

    return os.path.join(tmp_dir, mp4_files[0]), tmp_dir
|
||||
|
||||
async def download(self, progress_callback=None) -> tuple[str, str]:
    """Run the blocking download in the shared executor with a hard timeout.

    Args:
        progress_callback: forwarded to ``_download_sync``.

    Returns:
        ``(filepath, tmp_dir)`` from ``_download_sync``.

    Raises:
        TimeoutError: if the download exceeds 10 minutes.
    """
    # get_running_loop() replaces the deprecated get_event_loop() call
    # inside a coroutine.
    loop = asyncio.get_running_loop()
    try:
        return await asyncio.wait_for(
            loop.run_in_executor(
                executor,
                lambda: self._download_sync(progress_callback)
            ),
            timeout=600
        )
    except asyncio.TimeoutError as e:
        # Chain the cause so debugging shows where the timeout originated.
        raise TimeoutError('Download timed out after 10 minutes.') from e
|
||||
|
||||
async def stream_file(self, filepath: str):
    """Return an ``(async_chunk_generator, file_size)`` pair for *filepath*.

    The generator yields the file in ``CHUNK_SIZE``-byte pieces.
    """
    size = os.path.getsize(filepath)

    async def _chunks():
        # NOTE(review): reads are synchronous inside an async generator —
        # presumably acceptable for local temp files; confirm under load.
        with open(filepath, 'rb') as fh:
            while chunk := fh.read(CHUNK_SIZE):
                yield chunk

    return _chunks(), size
|
||||
|
||||
|
||||
class YTPlaylist:
|
||||
def __init__(self, link: str):
    """Store the playlist URL for later metadata extraction."""
    self.link = link
|
||||
|
||||
def _fetch_sync(self) -> dict:
|
||||
"""Fetch full playlist info — each entry has its own formats."""
|
||||
ydl_opts = {
|
||||
'quiet': True,
|
||||
'no_warnings': True,
|
||||
@@ -287,10 +377,6 @@ class YTPlaylist:
|
||||
return ydl.extract_info(self.link, download=False)
|
||||
|
||||
def _fetch_flat_sync(self) -> dict:
|
||||
"""
|
||||
Fast fetch — only titles/IDs, no format details.
|
||||
Use this for the info endpoint to avoid 30s+ waits.
|
||||
"""
|
||||
ydl_opts = {
|
||||
'quiet': True,
|
||||
'no_warnings': True,
|
||||
@@ -309,7 +395,6 @@ class YTPlaylist:
|
||||
return await loop.run_in_executor(executor, self._fetch_sync)
|
||||
|
||||
def get_entries(self, info: dict) -> list[dict]:
|
||||
"""Return clean list of video entries from playlist."""
|
||||
entries = []
|
||||
for i, entry in enumerate(info.get('entries', []), 1):
|
||||
if not entry:
|
||||
@@ -334,87 +419,88 @@ class YTPlaylist:
|
||||
'entries': entries,
|
||||
}
|
||||
|
||||
async def generate_zip(
|
||||
self,
|
||||
format_id: str = 'best',
|
||||
progress_callback=None
|
||||
):
|
||||
"""
|
||||
Async generator — yields chunks of a ZIP file containing all videos.
|
||||
Each video is streamed through FFmpeg and added to the zip on the fly.
|
||||
"""
|
||||
info = await self.fetch_flat()
|
||||
entries = self.get_entries(info)
|
||||
total = len(entries)
|
||||
async def generate_zip(self, format_id: str = 'best', progress_callback=None):
|
||||
info = await self.fetch_flat()
|
||||
entries = self.get_entries(info)
|
||||
total = len(entries)
|
||||
|
||||
zip_buffer = io.BytesIO()
|
||||
zip_buffer = io.BytesIO()
|
||||
|
||||
for i, entry in enumerate(entries, 1):
|
||||
video_url_yt = entry['url']
|
||||
safe_title = "".join(
|
||||
c for c in entry['title'] if c.isascii() and (c.isalnum() or c in ' ._-')
|
||||
)[:60].strip() or f'video_{i}'
|
||||
for i, entry in enumerate(entries, 1):
|
||||
video_url_yt = entry['url']
|
||||
safe_title = "".join(
|
||||
c for c in entry['title']
|
||||
if c.isascii() and (c.isalnum() or c in ' ._-')
|
||||
)[:60].strip() or f'video_{i}'
|
||||
|
||||
if progress_callback:
|
||||
progress_callback(i, total, entry['title'])
|
||||
if progress_callback:
|
||||
progress_callback(i, total, entry['title'])
|
||||
|
||||
try:
|
||||
downloader = YTVideoDownloader(video_url_yt, format_id)
|
||||
vid_url, aud_url, _ = await downloader.get_urls()
|
||||
except Exception as e:
|
||||
print(f'Skipping {entry["title"]}: {e}')
|
||||
continue
|
||||
try:
|
||||
info_obj = YTVideoInfo(video_url_yt)
|
||||
vid_info = await info_obj.fetch()
|
||||
|
||||
video_bytes = io.BytesIO()
|
||||
|
||||
if aud_url:
|
||||
process = await asyncio.create_subprocess_exec(
|
||||
'ffmpeg',
|
||||
'-i', vid_url,
|
||||
'-i', aud_url,
|
||||
'-c:v', 'copy',
|
||||
'-c:a', 'aac',
|
||||
'-b:a', '192k',
|
||||
'-f', 'mp4',
|
||||
'-movflags', 'frag_keyframe+empty_moov+faststart',
|
||||
'pipe:1',
|
||||
stdout=asyncio.subprocess.PIPE,
|
||||
stderr=asyncio.subprocess.DEVNULL,
|
||||
)
|
||||
try:
|
||||
while True:
|
||||
chunk = await process.stdout.read(256 * 1024)
|
||||
if not chunk:
|
||||
break
|
||||
video_bytes.write(chunk)
|
||||
finally:
|
||||
try:
|
||||
process.kill()
|
||||
await process.wait()
|
||||
except Exception:
|
||||
pass
|
||||
requested = vid_info.get('requested_formats', [])
|
||||
if len(requested) == 2:
|
||||
vid_url = requested[0]['url']
|
||||
aud_url = requested[1]['url']
|
||||
else:
|
||||
async with aiohttp.ClientSession() as session:
|
||||
async with session.get(vid_url) as r:
|
||||
async for chunk in r.content.iter_chunked(256 * 1024):
|
||||
video_bytes.write(chunk)
|
||||
vid_url = vid_info.get('url')
|
||||
aud_url = None
|
||||
except Exception as e:
|
||||
print(f'Skipping {entry["title"]}: {e}')
|
||||
continue
|
||||
|
||||
video_bytes.seek(0)
|
||||
filename = f'{i:02d}. {safe_title}.mp4'
|
||||
with zipfile.ZipFile(zip_buffer, 'a', zipfile.ZIP_STORED) as zf:
|
||||
zf.writestr(filename, video_bytes.read())
|
||||
video_bytes = io.BytesIO()
|
||||
|
||||
zip_buffer.seek(0)
|
||||
while True:
|
||||
chunk = zip_buffer.read(256 * 1024)
|
||||
if not chunk:
|
||||
break
|
||||
yield chunk
|
||||
if aud_url:
|
||||
process = await asyncio.create_subprocess_exec(
|
||||
'ffmpeg',
|
||||
'-i', vid_url,
|
||||
'-i', aud_url,
|
||||
'-c:v', 'copy',
|
||||
'-c:a', 'aac',
|
||||
'-b:a', '192k',
|
||||
'-f', 'mp4',
|
||||
'-movflags', 'frag_keyframe+empty_moov+faststart',
|
||||
'pipe:1',
|
||||
stdout=asyncio.subprocess.PIPE,
|
||||
stderr=asyncio.subprocess.DEVNULL,
|
||||
)
|
||||
try:
|
||||
while True:
|
||||
chunk = await process.stdout.read(CHUNK_SIZE)
|
||||
if not chunk:
|
||||
break
|
||||
video_bytes.write(chunk)
|
||||
finally:
|
||||
try:
|
||||
process.kill()
|
||||
await process.wait()
|
||||
except Exception:
|
||||
pass
|
||||
else:
|
||||
async with aiohttp.ClientSession() as session:
|
||||
async with session.get(vid_url) as r:
|
||||
async for chunk in r.content.iter_chunked(CHUNK_SIZE):
|
||||
video_bytes.write(chunk)
|
||||
|
||||
zip_buffer.seek(0)
|
||||
zip_buffer.truncate(0)
|
||||
video_bytes.seek(0)
|
||||
filename = f'{i:02d}. {safe_title}.mp4'
|
||||
with zipfile.ZipFile(zip_buffer, 'a', zipfile.ZIP_STORED) as zf:
|
||||
zf.writestr(filename, video_bytes.read())
|
||||
|
||||
zip_buffer.seek(0)
|
||||
remainder = zip_buffer.read()
|
||||
if remainder:
|
||||
yield remainder
|
||||
while True:
|
||||
chunk = zip_buffer.read(CHUNK_SIZE)
|
||||
if not chunk:
|
||||
break
|
||||
yield chunk
|
||||
|
||||
zip_buffer.seek(0)
|
||||
zip_buffer.truncate(0)
|
||||
|
||||
zip_buffer.seek(0)
|
||||
remainder = zip_buffer.read()
|
||||
if remainder:
|
||||
yield remainder
|
||||
Reference in New Issue
Block a user