feat(app): 支持 API 模式设置临时代理

This commit is contained in:
2025-04-12 10:40:08 +08:00
parent da86da6bb4
commit c88ac3de3d
13 changed files with 167 additions and 27 deletions

View File

@@ -287,6 +287,7 @@ class XHS:
bar,
data: bool,
cookie: str = None,
proxy: str = None,
):
if await self.skip_download(i := self.__extract_link_id(url)) and not data:
msg = _("作品 {0} 存在下载记录,跳过处理").format(i)
@@ -297,6 +298,7 @@ class XHS:
url,
log=log,
cookie=cookie,
proxy=proxy,
)
namespace = self.__generate_data_object(html)
if not namespace:
@@ -552,11 +554,10 @@ class XHS:
None,
not extract.skip,
extract.cookie,
extract.proxy,
):
msg = _("获取小红书作品数据成功")
else:
msg = _("获取小红书作品数据失败")
data = None
return ExtractData(
message=msg, url=url[0] if url else extract.url, data=data
)
return ExtractData(message=msg, params=extract, data=data)

View File

@@ -1,6 +1,7 @@
from typing import TYPE_CHECKING

from httpx import AsyncClient
from httpx import HTTPError
from httpx import get

from ..module import ERROR, Manager, logging, retry, sleep_time
from ..translation import _
@@ -19,6 +20,7 @@ class Html:
self.retry = manager.retry
self.client = manager.request_client
self.headers = manager.headers
self.timeout = manager.timeout
@retry
async def request_url(
@@ -27,14 +29,15 @@ class Html:
content=True,
log=None,
cookie: str = None,
proxy: str = None,
**kwargs,
) -> str:
headers = self.update_cookie(
cookie,
)
try:
match content:
case True:
match (content, bool(proxy)):
case (True, False):
response = await self.__request_url_get(
url,
headers,
@@ -43,7 +46,17 @@ class Html:
await sleep_time()
response.raise_for_status()
return response.text
case False:
case (True, True):
response = await self.__request_url_get_proxy(
url,
headers,
proxy,
**kwargs,
)
await sleep_time()
response.raise_for_status()
return response.text
case (False, False):
response = await self.__request_url_head(
url,
headers,
@@ -51,6 +64,15 @@ class Html:
)
await sleep_time()
return str(response.url)
case (False, True):
response = await self.__request_url_head_proxy(
url,
headers,
proxy,
**kwargs,
)
await sleep_time()
return str(response.url)
case _:
raise ValueError
except HTTPError as error:
@@ -81,6 +103,23 @@ class Html:
**kwargs,
)
async def __request_url_head_proxy(
        self,
        url: str,
        headers: dict,
        proxy: str,
        **kwargs,
):
    """Send a HEAD request through a temporary proxy.

    httpx does not accept ``proxy``/``verify`` as per-request keyword
    arguments on ``AsyncClient.head`` — they are client-construction
    options — so the original call raised ``TypeError`` at runtime.
    Instead, spin up a short-lived ``AsyncClient`` configured with the
    proxy for this single request.

    Args:
        url: Target URL.
        headers: Request headers (already includes any cookie override).
        proxy: Proxy URL to route this request through.
        **kwargs: Extra per-request options forwarded to ``client.head``.

    Returns:
        The httpx response object (caller reads ``response.url`` after
        redirects are followed).
    """
    # NOTE(review): verify=False mirrors the original code's choice to
    # skip TLS verification when a proxy is supplied — confirm intended.
    async with AsyncClient(
        proxy=proxy,
        verify=False,
        follow_redirects=True,
        timeout=self.timeout,
    ) as client:
        return await client.head(
            url,
            headers=headers,
            **kwargs,
        )
async def __request_url_get(
self,
url: str,
@@ -92,3 +131,20 @@ class Html:
headers=headers,
**kwargs,
)
async def __request_url_get_proxy(
        self,
        url: str,
        headers: dict,
        proxy: str,
        **kwargs,
):
    """Send a GET request through a temporary proxy.

    The original implementation called the *synchronous* module-level
    ``httpx.get`` inside this ``async def``, blocking the event loop for
    the full duration of the request and diverging from the async style
    of the sibling ``__request_url_get``. Use a short-lived
    ``AsyncClient`` configured with the proxy instead, and await the
    request so other tasks keep running.

    Args:
        url: Target URL.
        headers: Request headers (already includes any cookie override).
        proxy: Proxy URL to route this request through.
        **kwargs: Extra per-request options forwarded to ``client.get``.

    Returns:
        The httpx response object (caller checks status and reads
        ``response.text``).
    """
    # NOTE(review): verify=False mirrors the original code's choice to
    # skip TLS verification when a proxy is supplied — confirm intended.
    async with AsyncClient(
        proxy=proxy,
        verify=False,
        follow_redirects=True,
        timeout=self.timeout,
    ) as client:
        return await client.get(
            url,
            headers=headers,
            **kwargs,
        )

View File

@@ -89,6 +89,7 @@ class Manager:
self.print_proxy_tip(
_print,
)
self.timeout = timeout
self.request_client = AsyncClient(
headers=self.headers
| {

View File

@@ -6,10 +6,11 @@ class ExtractParams(BaseModel):
download: bool = False
index: list = None
cookie: str = None
proxy: str = None
skip: bool = False
class ExtractData(BaseModel):
message: str
url: str
params: ExtractParams
data: dict | None