
Auto-format code 🧹🌟🤖

Formatter [BOT]
2025-06-09 14:57:17 +00:00
parent be4028b235
commit a62061f4ae
2 changed files with 30 additions and 26 deletions

File 1 of 2

@@ -42,12 +42,12 @@ from starlette.responses import FileResponse, Response
 from xiaomusic import __version__
 from xiaomusic.utils import (
+    check_bili_fav_list,
     chmoddir,
     convert_file_to_mp3,
     deepcopy_data_no_sensitive_info,
     download_one_music,
     download_playlist,
-    check_bili_fav_list,
     downloadfile,
     get_latest_version,
     is_mp3,
@@ -553,9 +553,11 @@ async def downloadplaylist(data: DownloadPlayList, Verifcation=Depends(verificat
     download_proc_list = []
     if bili_fav_list:
         for bvid, title in bili_fav_list.items():
-            bvurl = f"https://www.bilibili.com/video/{bvid}"
-            download_proc_list[title] = await download_one_music(config, bvurl, os.path.join(data.dirname, title))
-    for title, download_proc_sigle in download_proc_list.items():
+            bvurl = f"https://www.bilibili.com/video/{bvid}"
+            download_proc_list[title] = await download_one_music(
+                config, bvurl, os.path.join(data.dirname, title)
+            )
+    for title, download_proc_sigle in download_proc_list.items():
         exit_code = await download_proc_sigle.wait()
         log.info(f"Download completed {title} with exit code {exit_code}")
     dir_path = os.path.join(config.download_path, data.dirname)

File 2 of 2

@@ -23,7 +23,7 @@ from collections.abc import AsyncIterator
 from dataclasses import asdict, dataclass
 from http.cookies import SimpleCookie
 from time import sleep
-from urllib.parse import urlparse, parse_qs
+from urllib.parse import parse_qs, urlparse
 
 import aiohttp
 import mutagen
@@ -941,27 +941,28 @@ def _set_wave_tags(audio, info):
audio["Title"] = info.title
audio["Artist"] = info.artist
async def check_bili_fav_list(url):
bvid_info = {}
parsed_url = urlparse(url)
path = parsed_url.path
# 提取查询参数
query_params = parse_qs(parsed_url.query)
if 'space.bilibili.com' in url:
if '/favlist' in path:
lid = query_params.get('fid', [None])[0]
type = query_params.get('ctype', [None])[0]
if type == '11':
type = 'create'
elif type == '21':
type = 'collect'
if "space.bilibili.com" in url:
if "/favlist" in path:
lid = query_params.get("fid", [None])[0]
type = query_params.get("ctype", [None])[0]
if type == "11":
type = "create"
elif type == "21":
type = "collect"
else:
raise ValueError("当前只支持合集和收藏夹")
elif '/lists/' in path:
parts = path.split('/')
if len(parts) >= 4 and '?' in url:
elif "/lists/" in path:
parts = path.split("/")
if len(parts) >= 4 and "?" in url:
lid = parts[3] # 提取 lid
type = query_params.get('type', [None])[0]
type = query_params.get("type", [None])[0]
# https://api.bilibili.com/x/polymer/web-space/seasons_archives_list?season_id={lid}&page_size=30&page_num=1
page_size = 100
page_num = 1
@@ -972,38 +973,38 @@ async def check_bili_fav_list(url):
"Origin": "https://space.bilibili.com",
}
async with aiohttp.ClientSession(headers=headers) as session:
if type == 'season' or type == 'collect':
if type == "season" or type == "collect":
while True:
list_url = f"https://api.bilibili.com/x/polymer/web-space/seasons_archives_list?season_id={lid}&page_size={page_size}&page_num={page_num}"
async with session.get(list_url) as response:
if response.status != 200:
raise Exception(f"Failed to fetch data from {list_url}")
data = await response.json()
archives = data.get('data', {}).get('archives', [])
archives = data.get("data", {}).get("archives", [])
if not archives:
break
for archive in archives:
bvid = archive.get('bvid', None)
title = archive.get('title', None)
bvid = archive.get("bvid", None)
title = archive.get("title", None)
bvid_info[bvid] = title
if len(archives) < page_size:
break
page_num += 1
page_num += 1
sleep(1)
elif type == 'create':
elif type == "create":
while True:
list_url = f"https://api.bilibili.com/x/v3/fav/resource/list?media_id={lid}&pn={page_num}&ps={page_size}&order=mtime"
async with session.get(list_url) as response:
if response.status != 200:
raise Exception(f"Failed to fetch data from {list_url}")
data = await response.json()
medias = data.get('data', {}).get('medias', [])
medias = data.get("data", {}).get("medias", [])
if not medias:
break
for media in medias:
bvid = media.get('bvid', None)
title = media.get('title', None)
bvid = media.get("bvid", None)
title = media.get("title", None)
bvurl = f"https://www.bilibili.com/video/{bvid}"
bvid_info[bvurl] = title
@@ -1014,6 +1015,7 @@ async def check_bili_fav_list(url):
         raise ValueError("当前只支持合集和收藏夹")
     return bvid_info
 
+
 # 下载播放列表
 async def download_playlist(config, url, dirname):
     title = f"{dirname}/%(title)s.%(ext)s"