 import os
 from pathlib import Path
 import re
+import string
 import traceback
+from typing import cast
 
 import aiofiles
 import httpx
@@ -93,6 +95,9 @@ async def archive_bvid(
     logined: bool = False,
     semaphore: asyncio.Semaphore,
 ):
+    def get_codec_version(codec: str) -> tuple[int, ...]:
+        return tuple(int("".join(char for char in ver if char in string.hexdigits), base=16) for ver in codec.split("."))
+
     async with semaphore:
         assert d.hierarchy is True, _("hierarchy 必须为 True")  # 为保持后续目录结构、文件命名的一致性
         assert d.client.cookies.get("SESSDATA") is not None, _(
@@ -168,21 +173,55 @@ def delete_cache(reason: str = ""):
             codec = None
             quality = None
             if video_info.dash:
-                # 选择编码 dvh->hev->avc
-                # 不选 av0 ,毕竟目前没几个设备能拖得动
-                codec_candidates = ["dvh", "hev", "avc"]
-                for codec_candidate in codec_candidates:
-                    for media in video_info.dash.videos:
-                        if media.codec.startswith(codec_candidate):
-                            codec = media.codec
-                            quality = media.quality
-                            print(f'{file_basename}: "{codec}" "{media.quality}" ...')
-                            break
-                    if codec is not None:
-                        break
-                assert (
-                    codec is not None and quality is not None
-                ), f"{file_basename}: " + _("没有 dvh、avc 或 hevc 编码的视频")
+                assert video_info.dash.videos, "Dash video streams not found"
+                assert all(media.codec and media.quality_id and media.bandwidth for media in video_info.dash.videos), (
+                    "All video streams must have codec, quality, and bandwidth information"
+                )
+
+                high_quality_codec_candidates = {"dvh", "hvc"}
+                all_codecs = {cast(str, media.codec)[:3] for media in video_info.dash.videos}
+                known_codecs = high_quality_codec_candidates.union({"avc", "hev", "av0"})
+                assert all_codecs.issubset(known_codecs), f"Unknown codecs found: {all_codecs - known_codecs}"
+
+                # First sort by quality, then codec preference, then bandwidth.
+                sorted_videos = sorted(
+                    [media for media in video_info.dash.videos if media.codec and media.quality_id and media.bandwidth],
+                    key=lambda m: (
+                        m.quality_id,
+                        cast(str, m.codec)[:3] in high_quality_codec_candidates,
+                        m.bandwidth,
+                    ),
+                    reverse=True,
+                )
+
+                if not sorted_videos:
+                    raise APIError(f"{file_basename}: " + _("没有可用的 dash 视频"), page.p_url)
+
+                # Get the properties of the best video after the initial sort.
+                top_candidate = sorted_videos[0]
+                target_codec_prefix = cast(str, top_candidate.codec)[:3]
+                target_quality = top_candidate.quality_id
+
+                # Filter videos to only those with the same quality and codec type.
+                candidates = [
+                    m
+                    for m in sorted_videos
+                    if m.quality_id == target_quality and cast(str, m.codec)[:3] == target_codec_prefix
+                ]
+
+                # Second sort by full codec version.
+                final_candidates = sorted(
+                    candidates,
+                    key=lambda m: get_codec_version(cast(str, m.codec)),
+                    reverse=True,
+                )
+
+                best_video = final_candidates[0]
+                codec = best_video.codec
+                quality = best_video.quality_id
+                assert codec is not None and quality is not None, f"{file_basename}: " + _("无法确定最佳视频编码与画质")
+
+                print(f'{file_basename}: "{codec}" "{best_video.quality}" ...')
             elif video_info.other:
                 # print(f"{file_basename}: 未解析到 dash 资源,交给 bilix 处理 ...")
                 print("{file_basename}: " + _("未解析到 dash 资源,交给 bilix 处理 ..."))
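
For context on the new selection strategy: quality dominates, the preferred codec families ("dvh"/"hvc", i.e. Dolby Vision and "hvc1"-style HEVC tags) win over "avc"/"hev"/"av0" at equal quality, bandwidth breaks the next tie, and the full dotted codec version (parsed by get_codec_version) is only used as a final tie-break among streams that share the winner's quality and codec family. The sketch below is illustrative only and makes assumptions: Media is a hypothetical stand-in for bilix's dash media objects and pick_best is not part of this PR; field names mirror the attributes used in the diff (codec, quality_id, bandwidth).

# Minimal sketch (not part of the PR): illustrates the two-stage selection.
# `Media` is a hypothetical stand-in for bilix's dash media objects.
import string
from dataclasses import dataclass


@dataclass
class Media:
    codec: str       # e.g. "hvc1.1.6.L120.90", "avc1.640032", "dvh1.08.07"
    quality_id: int  # larger means higher quality
    bandwidth: int   # bits per second


def get_codec_version(codec: str) -> tuple[int, ...]:
    # Same idea as the helper added in the diff: split the codec string on ".",
    # keep the hex digits of each part, and parse each part as a hex integer,
    # so full codec versions compare lexicographically as tuples.
    return tuple(
        int("".join(char for char in part if char in string.hexdigits), base=16)
        for part in codec.split(".")
    )


def pick_best(videos: list[Media]) -> Media:
    preferred = {"dvh", "hvc"}  # Dolby Vision / HEVC codec families
    # Stage 1: quality first, then preferred codec family, then bandwidth.
    ranked = sorted(
        videos,
        key=lambda m: (m.quality_id, m.codec[:3] in preferred, m.bandwidth),
        reverse=True,
    )
    top = ranked[0]
    # Stage 2: among streams with the winner's quality and codec family,
    # pick the one with the highest parsed codec version.
    ties = [
        m for m in ranked
        if m.quality_id == top.quality_id and m.codec[:3] == top.codec[:3]
    ]
    return max(ties, key=lambda m: get_codec_version(m.codec))


if __name__ == "__main__":
    streams = [
        Media("avc1.640032", 80, 2_600_000),
        Media("hvc1.1.6.L120.90", 80, 1_900_000),
        Media("hvc1.2.4.L153.90", 80, 1_900_000),
    ]
    # Equal quality and bandwidth: the "hvc" family beats "avc", and the
    # codec-version tie-break then prefers "hvc1.2.4.L153.90".
    print(pick_best(streams).codec)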