Mirror of https://github.com/ihmily/DouyinLiveRecorder.git (synced 2025-12-26 05:48:32 +08:00)
Compare commits
37 Commits
SHA1:
add187f8d8, 0333cb4a01, 73857755a7, fec734ae74, 853d03ea14, 2fb7f7afd7,
200e5b5b58, abb204e6e9, 271a53621d, d77760f3c9, 9c913e23cc, af37bf28f0,
d4796409c7, 93a12ab41d, 525b720627, e9f2a55ceb, 3965487746, e80f1e653a,
63787f1743, a328c6a1c5, be2c3a393f, c7e3cf47ce, 199186fb09, 5778ebc4b3,
d9f985303a, 110d5bded4, e478d72e62, bcfc268c1c, 8e4e9b098f, 9f499f3fa6,
ae8200e01c, 952eeb9b7c, ba8979e4ee, c157c08e5a, effcfcc76f, d787838ed2,
86a822f0db
.github/workflows/build-image.yml (vendored) | 68

@@ -4,45 +4,51 @@ on:
   push:
     tags:
       - '*'
+  workflow_dispatch:
+    inputs:
+      tag_name:
+        description: 'Tag name for the Docker image'
+        required: false
+        default: 'latest'
 
 jobs:
   build_and_push:
     runs-on: ubuntu-latest
 
     steps:
       - name: Checkout code
         uses: actions/checkout@v2
 
       - name: Set up QEMU
         uses: docker/setup-qemu-action@v3
 
       - name: Set up Docker Buildx
         uses: docker/setup-buildx-action@v3
 
       - name: Cache Docker layers
-        uses: actions/cache@v2
+        uses: actions/cache@v3
        with:
          path: /tmp/.buildx-cache
          key: ${{ runner.os }}-buildx-${{ github.sha }}
          restore-keys: |
            ${{ runner.os }}-buildx-
 
      - name: Log in to Docker Hub
        uses: docker/login-action@v2
        with:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_PASSWORD }}
          registry: docker.io
 
      - name: Build and push Docker image
        uses: docker/build-push-action@v5
        with:
          context: .
          file: ./Dockerfile
          push: true
          tags: |
-            ihmily/douyin-live-recorder:${{ github.ref_name }}
+            ihmily/douyin-live-recorder:${{ github.event.inputs.tag_name || github.ref_name }}
            ihmily/douyin-live-recorder:latest
          platforms: linux/amd64,linux/arm64
          cache-from: type=local,src=/tmp/.buildx-cache
          cache-to: type=local,dest=/tmp/.buildx-cache
.gitignore (vendored) | 2

@@ -90,7 +90,7 @@ node-v*.zip
 # pyenv
 # For a library or package, you might want to ignore these files since the code is
 # intended to run in multiple environments; otherwise, check them in:
-# .python-version
+.python-version
 
 # pipenv
 # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
LICENSE | 2

@@ -1,6 +1,6 @@
 MIT License
 
-Copyright (c) 2023 Hmily
+Copyright (c) 2025 Hmily
 
 Permission is hereby granted, free of charge, to any person obtaining a copy
 of this software and associated documentation files (the "Software"), to deal
README.md | 135

@@ -31,7 +31,7 @@
 - [x] 猫耳FM
 - [x] Look直播
 - [x] WinkTV
-- [x] FlexTV
+- [x] TTingLive(原Flextv)
 - [x] PopkonTV
 - [x] TwitCasting
 - [x] 百度直播
@@ -61,6 +61,10 @@
 - [x] 淘宝
 - [x] 京东
 - [x] Faceit
+- [x] 咪咕
+- [x] 连接直播
+- [x] 来秀直播
+- [x] Picarto
 - [ ] 更多平台正在更新中
 
 </div>
@@ -80,6 +84,7 @@
 ├── utils.py -> (contains utility functions)
 ├── logger.py -> (logger handdle)
 ├── room.py -> (get room info)
+├── ab_sign.py-> (generate dy token)
 ├── /javascript -> (some decrypt code)
 ├── main.py -> (main file)
 ├── ffmpeg_install.py -> (ffmpeg install script)
@@ -174,7 +179,7 @@ https://look.163.com/live?id=65108820&position=3
 WinkTV:
 https://www.winktv.co.kr/live/play/anjer1004
 
-FlexTV:
+FlexTV(TTinglive):
 https://www.flextv.co.kr/channels/593127/live
 
 PopkonTV:
@@ -257,6 +262,7 @@ Youtube:
 https://www.youtube.com/watch?v=cS6zS5hi1w0
 
 淘宝(需cookie):
+https://tbzb.taobao.com/live?liveId=532359023188
 https://m.tb.cn/h.TWp0HTd
 
 京东:
@@ -264,12 +270,24 @@ https://3.cn/28MLBy-E
 
 Faceit:
 https://www.faceit.com/zh/players/Compl1/stream
 
+连接直播:
+https://show.lailianjie.com/10000258
+
+咪咕直播:
+https://www.miguvideo.com/p/live/120000541321
+
+来秀直播:
+https://www.imkktv.com/h5/share/video.html?uid=1845195&roomId=1710496
+
+Picarto:
+https://www.picarto.tv/cuteavalanche
 ```
 
 
 ## 🎃源码运行
-使用源码运行,前提要有**Python>=3.10**环境,如果没有请先自行安装Python,再执行下面步骤。
+使用源码运行,可参考下面的步骤。
 
 1.首先拉取或手动下载本仓库项目代码
 
@@ -281,9 +299,94 @@ git clone https://github.com/ihmily/DouyinLiveRecorder.git
 
 ```bash
 cd DouyinLiveRecorder
-pip3 install -r requirements.txt
 ```
 
+> [!TIP]
+> - 不论你是否已安装 **Python>=3.10** 环境, 都推荐使用 [**uv**](https://github.com/astral-sh/uv) 运行, 因为它可以自动管理虚拟环境和方便地管理 **Python** 版本, **不过这完全是可选的**<br />
+> 使用以下命令安装
+> ```bash
+> # 在 macOS 和 Linux 上安装 uv
+> curl -LsSf https://astral.sh/uv/install.sh | sh
+> ```
+> ```powershell
+> # 在 Windows 上安装 uv
+> powershell -ExecutionPolicy ByPass -c "irm https://astral.sh/uv/install.ps1 | iex"
+> ```
+> - 如果安装依赖速度太慢, 你可以考虑使用国内 pip 镜像源:<br />
+> 在 `pip` 命令使用 `-i` 参数指定, 如 `pip3 install -r requirements.txt -i https://pypi.tuna.tsinghua.edu.cn/simple`<br />
+> 或者在 `uv` 命令 `--index` 选项指定, 如 `uv sync --index https://pypi.tuna.tsinghua.edu.cn/simple`
+
+<details>
+
+<summary>如果已安装 <b>Python>=3.10</b> 环境</summary>
+
+- :white_check_mark: 在虚拟环境中安装 (推荐)
+
+  1. 创建虚拟环境
+
+     - 使用系统已安装的 Python, 不使用 uv
+
+       ```bash
+       python -m venv .venv
+       ```
+
+     - 使用 uv, 默认使用系统 Python, 你可以添加 `--python` 选项指定 Python 版本而不使用系统 Python [uv官方文档](https://docs.astral.sh/uv/concepts/python-versions/)
+
+       ```bash
+       uv venv
+       ```
+
+  2. 在终端激活虚拟环境 (在未安装 uv 或你想要手动激活虚拟环境时执行, 若已安装 uv, 可以跳过这一步, uv 会自动激活并使用虚拟环境)
+
+     **Bash** 中
+     ```bash
+     source .venv/Scripts/activate
+     ```
+
+     **Powershell** 中
+     ```powershell
+     .venv\Scripts\activate.ps1
+     ```
+
+     **Windows CMD** 中
+     ```bat
+     .venv\Scripts\activate.bat
+     ```
+
+  3. 安装依赖
+
+     ```bash
+     # 使用 pip (若安装太慢或失败, 可使用 `-i` 指定镜像源)
+     pip3 install -U pip && pip3 install -r requirements.txt
+     # 或者使用 uv (可使用 `--index` 指定镜像源)
+     uv sync
+     # 或者
+     uv pip sync requirements.txt
+     ```
+
+- :x: 在系统 Python 环境中安装 (不推荐)
+
+  ```bash
+  pip3 install -U pip && pip3 install -r requirements.txt
+  ```
+
+</details>
+
+<details>
+
+<summary>如果未安装 <b>Python>=3.10</b> 环境</summary>
+
+你可以使用 [**uv**](https://github.com/astral-sh/uv) 安装依赖
+
+```bash
+# uv 将使用 3.10 及以上的最新 python 发行版自动创建并使用虚拟环境, 可使用 --python 选项指定 python 版本, 参见 https://docs.astral.sh/uv/reference/cli/#uv-sync--python 和 https://docs.astral.sh/uv/reference/cli/#uv-pip-sync--python
+uv sync
+# 或
+uv pip sync requirements.txt
+```
+
+</details>
+
 3.安装[FFmpeg](https://ffmpeg.org/download.html#build-linux),如果是Windows系统,这一步可跳过。对于Linux系统,执行以下命令安装
 
 CentOS执行
@@ -316,6 +419,12 @@ brew install ffmpeg
 
 ```python
 python main.py
+```
+或
+
+```bash
+uv run main.py
 ```
 
 其中Linux系统请使用`python3 main.py` 运行。
@@ -372,6 +481,13 @@ docker-compose stop
 
 
 
+## 🤖相关项目
+
+- StreamCap: https://github.com/ihmily/StreamCap
+- streamget: https://github.com/ihmily/streamget
+
+
 ## ❤️贡献者
 
 [](https://github.com/ihmily)
@@ -391,10 +507,21 @@ docker-compose stop
 
 [](https://github.com/HoratioShaw)
 [](https://github.com/nov30th)
+[](https://github.com/727155455)
+[](https://github.com/nixingshiguang)
+[](https://github.com/1411430556)
+[](https://github.com/Ovear)
 
 ## ⏳提交日志
 
+- 20251024
+  - 修复抖音风控无法获取数据问题
+  - 新增soop.com录制支持
+  - 修复bigo录制
+
 - 20250127
   - 新增淘宝、京东、faceit直播录制
   - 修复小红书直播流录制以及转码问题
|
|||||||
额外使用代理录制的平台(逗号分隔) =
|
额外使用代理录制的平台(逗号分隔) =
|
||||||
|
|
||||||
[推送配置]
|
[推送配置]
|
||||||
# 可选微信|钉钉|tg|邮箱|bark|ntfy 可填多个
|
# 可选微信|钉钉|tg|邮箱|bark|ntfy|pushplus 可填多个
|
||||||
直播状态推送渠道 =
|
直播状态推送渠道 =
|
||||||
钉钉推送接口链接 =
|
钉钉推送接口链接 =
|
||||||
微信推送接口链接 =
|
微信推送接口链接 =
|
||||||
bark推送接口链接 =
|
bark推送接口链接 =
|
||||||
bark推送中断级别 = active
|
bark推送中断级别 = active
|
||||||
bark推送铃声 =
|
bark推送铃声 =
|
||||||
钉钉通知@对象(填手机号) =
|
钉钉通知@对象(填手机号) =
|
||||||
钉钉通知@全体(是/否) = 否
|
钉钉通知@全体(是/否) = 否
|
||||||
tgapi令牌 =
|
tgapi令牌 =
|
||||||
tg聊天id(个人或者群组id) =
|
tg聊天id(个人或者群组id) =
|
||||||
smtp邮件服务器 =
|
smtp邮件服务器 =
|
||||||
是否使用SMTP服务SSL加密(是/否) =
|
是否使用SMTP服务SSL加密(是/否) =
|
||||||
SMTP邮件服务器端口 =
|
SMTP邮件服务器端口 =
|
||||||
邮箱登录账号 =
|
邮箱登录账号 =
|
||||||
发件人密码(授权码) =
|
发件人密码(授权码) =
|
||||||
发件人邮箱 =
|
发件人邮箱 =
|
||||||
发件人显示昵称 =
|
发件人显示昵称 =
|
||||||
收件人邮箱 =
|
收件人邮箱 =
|
||||||
ntfy推送地址 = https://ntfy.sh/xxxx
|
ntfy推送地址 = https://ntfy.sh/xxxx
|
||||||
ntfy推送标签 = tada
|
ntfy推送标签 = tada
|
||||||
ntfy推送邮箱 =
|
ntfy推送邮箱 =
|
||||||
自定义推送标题 =
|
pushplus推送token =
|
||||||
自定义开播推送内容 =
|
自定义推送标题 =
|
||||||
自定义关播推送内容 =
|
自定义开播推送内容 =
|
||||||
|
自定义关播推送内容 =
|
||||||
只推送通知不录制(是/否) = 否
|
只推送通知不录制(是/否) = 否
|
||||||
直播推送检测频率(秒) = 1800
|
直播推送检测频率(秒) = 1800
|
||||||
开播推送开启(是/否) = 是
|
开播推送开启(是/否) = 是
|
||||||
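For reference, the new pushplus推送token option in the configuration file above can be read like any other key in this section. A small illustrative configparser sketch; the file name here is a placeholder, and main.py actually goes through its own read_config_value helper:

```python
# Illustrative read of the new option; the path is a placeholder, not the
# project's actual config location.
import configparser

config = configparser.ConfigParser()
config.read("config.ini", encoding="utf-8")
pushplus_token = config.get("推送配置", "pushplus推送token", fallback="")
print("pushplus enabled:", bool(pushplus_token))
```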
@@ -63,63 +64,68 @@ ntfy推送邮箱 =
 [Cookie]
 # 录制抖音必填
 抖音cookie = ttwid=1%7CB1qls3GdnZhUov9o2NxOMxxYS2ff6OSvEWbv0ytbES4%7C1680522049%7C280d802d6d478e3e78d0c807f7c487e7ffec0ae4e5fdd6a0fe74c3c6af149511; my_rd=1; passport_csrf_token=3ab34460fa656183fccfb904b16ff742; passport_csrf_token_default=3ab34460fa656183fccfb904b16ff742; d_ticket=9f562383ac0547d0b561904513229d76c9c21; n_mh=hvnJEQ4Q5eiH74-84kTFUyv4VK8xtSrpRZG1AhCeFNI; store-region=cn-fj; store-region-src=uid; LOGIN_STATUS=1; __security_server_data_status=1; FORCE_LOGIN=%7B%22videoConsumedRemainSeconds%22%3A180%7D; pwa2=%223%7C0%7C3%7C0%22; download_guide=%223%2F20230729%2F0%22; volume_info=%7B%22isUserMute%22%3Afalse%2C%22isMute%22%3Afalse%2C%22volume%22%3A0.6%7D; strategyABtestKey=%221690824679.923%22; stream_recommend_feed_params=%22%7B%5C%22cookie_enabled%5C%22%3Atrue%2C%5C%22screen_width%5C%22%3A1536%2C%5C%22screen_height%5C%22%3A864%2C%5C%22browser_online%5C%22%3Atrue%2C%5C%22cpu_core_num%5C%22%3A8%2C%5C%22device_memory%5C%22%3A8%2C%5C%22downlink%5C%22%3A10%2C%5C%22effective_type%5C%22%3A%5C%224g%5C%22%2C%5C%22round_trip_time%5C%22%3A150%7D%22; VIDEO_FILTER_MEMO_SELECT=%7B%22expireTime%22%3A1691443863751%2C%22type%22%3Anull%7D; home_can_add_dy_2_desktop=%221%22; __live_version__=%221.1.1.2169%22; device_web_cpu_core=8; device_web_memory_size=8; xgplayer_user_id=346045893336; csrf_session_id=2e00356b5cd8544d17a0e66484946f28; odin_tt=724eb4dd23bc6ffaed9a1571ac4c757ef597768a70c75fef695b95845b7ffcd8b1524278c2ac31c2587996d058e03414595f0a4e856c53bd0d5e5f56dc6d82e24004dc77773e6b83ced6f80f1bb70627; __ac_nonce=064caded4009deafd8b89; __ac_signature=_02B4Z6wo00f01HLUuwwAAIDBh6tRkVLvBQBy9L-AAHiHf7; ttcid=2e9619ebbb8449eaa3d5a42d8ce88ec835; webcast_leading_last_show_time=1691016922379; webcast_leading_total_show_times=1; webcast_local_quality=sd; live_can_add_dy_2_desktop=%221%22; msToken=1JDHnVPw_9yTvzIrwb7cQj8dCMNOoesXbA_IooV8cezcOdpe4pzusZE7NB7tZn9TBXPr0ylxmv-KMs5rqbNUBHP4P7VBFUu0ZAht_BEylqrLpzgt3y5ne_38hXDOX8o=; msToken=jV_yeN1IQKUd9PlNtpL7k5vthGKcHo0dEh_QPUQhr8G3cuYv-Jbb4NnIxGDmhVOkZOCSihNpA2kvYtHiTW25XNNX_yrsv5FN8O6zm3qmCIXcEe0LywLn7oBO2gITEeg=; tt_scid=mYfqpfbDjqXrIGJuQ7q-DlQJfUSG51qG.KUdzztuGP83OjuVLXnQHjsz-BRHRJu4e986
 快手cookie =
 tiktok_cookie =
 虎牙cookie =
 斗鱼cookie =
 yy_cookie =
 b站cookie =
 小红书cookie =
 bigo_cookie =
 blued_cookie =
 sooplive_cookie =
 netease_cookie =
 千度热播_cookie =
 pandatv_cookie =
 猫耳fm_cookie =
 winktv_cookie =
 flextv_cookie =
 look_cookie =
 twitcasting_cookie =
 baidu_cookie =
 weibo_cookie =
 kugou_cookie =
 twitch_cookie =
 liveme_cookie =
 huajiao_cookie =
 liuxing_cookie =
 showroom_cookie =
 acfun_cookie =
 changliao_cookie =
 yinbo_cookie =
 yingke_cookie =
 zhihu_cookie =
 chzzk_cookie =
 haixiu_cookie =
 vvxqiu_cookie =
 17live_cookie =
 langlive_cookie =
 pplive_cookie =
 6room_cookie =
 lehaitv_cookie =
 huamao_cookie =
 shopee_cookie =
 youtube_cookie =
 taobao_cookie =
 jd_cookie =
 faceit_cookie =
+migu_cookie =
+lianjie_cookie =
+laixiu_cookie =
+picarto_cookie =
 
 
 [Authorization]
 popkontv_token =
 
 [账号密码]
 sooplive账号 =
 sooplive密码 =
 flextv账号 =
 flextv密码 =
 popkontv账号 =
 partner_code = P-00001
 popkontv密码 =
 twitcasting账号类型 = normal
 twitcasting账号 =
 twitcasting密码 =
demo.py | 18

@@ -71,7 +71,7 @@ LIVE_STREAM_CONFIG = {
         "func": spider.get_winktv_stream_data,
     },
     "flextv": {
-        "url": "https://www.flextv.co.kr/channels/593127/live",
+        "url": "https://www.ttinglive.com/channels/685479/live",
         "func": spider.get_flextv_stream_data,
     },
     "looklive": {
@@ -190,6 +190,22 @@ LIVE_STREAM_CONFIG = {
     "faceit": {
         "url": "https://www.faceit.com/zh/players/Compl1/stream",
         "func": spider.get_faceit_stream_data,
+    },
+    "lianjie": {
+        "url": "https://show.lailianjie.com/10000258",
+        "func": spider.get_lianjie_stream_url,
+    },
+    "migu": {
+        "url": "https://www.miguvideo.com/p/live/120000541321",
+        "func": spider.get_migu_stream_url,
+    },
+    "laixiu": {
+        "url": "https://www.imkktv.com/h5/share/video.html?uid=1845195&roomId=1710496",
+        "func": spider.get_laixiu_stream_url,
+    },
+    "picarto": {
+        "url": "https://www.picarto.tv/cuteavalanche",
+        "func": spider.get_picarto_stream_url,
     }
 }
 
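Each LIVE_STREAM_CONFIG entry in demo.py pairs a sample room URL with the spider coroutine that resolves it. A hedged, self-contained sketch of driving such a table, using a stand-in coroutine instead of the project's spider module and the sample URLs from the diff above:

```python
# Sketch of iterating a LIVE_STREAM_CONFIG-style table; fake_fetch is a
# stand-in for the spider coroutines, not code from the repository.
import asyncio

async def fake_fetch(url: str, proxy_addr: str | None = None, cookies: str | None = None) -> dict:
    # A real spider coroutine would return room/stream metadata for the URL.
    return {"url": url, "is_live": False}

CONFIG = {
    "migu": {"url": "https://www.miguvideo.com/p/live/120000541321", "func": fake_fetch},
    "picarto": {"url": "https://www.picarto.tv/cuteavalanche", "func": fake_fetch},
}

async def run_all() -> None:
    for name, entry in CONFIG.items():
        data = await entry["func"](url=entry["url"])
        print(name, data)

asyncio.run(run_all())
```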
@@ -122,7 +122,7 @@ def install_ffmpeg_linux():
     logger.debug("Trying to install the stable version of ffmpeg")
     result = subprocess.run(['yum', '-y', 'update'], capture_output=True)
     if result.returncode != 0:
-        logger.error(f"Failed to update package lists using yum.")
+        logger.error("Failed to update package lists using yum.")
         return False
 
     result = subprocess.run(['yum', 'install', '-y', 'ffmpeg'], capture_output=True)
main.py | 282

@@ -4,7 +4,7 @@
 Author: Hmily
 GitHub: https://github.com/ihmily
 Date: 2023-07-17 23:52:05
-Update: 2025-02-08 19:19:00
+Update: 2025-10-23 19:48:05
 Copyright (c) 2023-2025 by Hmily, All Rights Reserved.
 Function: Record live stream video.
 """
@@ -22,27 +22,27 @@ import shutil
 import random
 import uuid
 from pathlib import Path
-import urllib.parse
 import urllib.request
 from urllib.error import URLError, HTTPError
 from typing import Any
 import configparser
+import httpx
 from src import spider, stream
 from src.proxy import ProxyDetector
 from src.utils import logger
 from src import utils
 from msg_push import (
-    dingtalk, xizhi, tg_bot, send_email, bark, ntfy
+    dingtalk, xizhi, tg_bot, send_email, bark, ntfy, pushplus
 )
 from ffmpeg_install import (
     check_ffmpeg, ffmpeg_path, current_env_path
 )
 
-version = "v4.0.3"
+version = "v4.0.7"
 platforms = ("\n国内站点:抖音|快手|虎牙|斗鱼|YY|B站|小红书|bigo|blued|网易CC|千度热播|猫耳FM|Look|TwitCasting|百度|微博|"
-             "酷狗|花椒|流星|Acfun|畅聊|映客|音播|知乎|嗨秀|VV星球|17Live|浪Live|漂漂|六间房|乐嗨|花猫|淘宝|京东"
+             "酷狗|花椒|流星|Acfun|畅聊|映客|音播|知乎|嗨秀|VV星球|17Live|浪Live|漂漂|六间房|乐嗨|花猫|淘宝|京东|咪咕|连接|来秀"
              "\n海外站点:TikTok|SOOP|PandaTV|WinkTV|FlexTV|PopkonTV|TwitchTV|LiveMe|ShowRoom|CHZZK|Shopee|"
-             "Youtube|Faceit")
+             "Youtube|Faceit|Picarto")
 
 recording = set()
 error_count = 0
@@ -92,7 +92,7 @@ def display_info() -> None:
     time.sleep(5)
     while True:
         try:
-            sys.stdout.flush()  # 强制刷新输出缓冲区
+            sys.stdout.flush()
             time.sleep(5)
             if Path(sys.executable).name != 'pythonw.exe':
                 os.system(clear_command)
@@ -102,9 +102,9 @@ def display_info() -> None:
             if split_video_by_time:
                 print(f"录制分段开启: {split_time}秒", end=" | ")
             else:
-                print(f"录制分段开启: 否", end=" | ")
+                print("录制分段开启: 否", end=" | ")
             if create_time_file:
-                print(f"是否生成时间文件: 是", end=" | ")
+                print("是否生成时间文件: 是", end=" | ")
             print(f"录制视频质量为: {video_record_quality}", end=" | ")
             print(f"录制视频格式为: {video_save_type}", end=" | ")
             print(f"目前瞬时错误数为: {error_count}", end=" | ")
@@ -220,7 +220,7 @@ def converts_mp4(converts_file_path: str, is_original_delete: bool = True) -> No
     try:
         if os.path.exists(converts_file_path) and os.path.getsize(converts_file_path) > 0:
             if converts_to_h264:
-                color_obj.print_colored(f"正在转码为MP4格式并重新编码为h264\n", color_obj.YELLOW)
+                color_obj.print_colored("正在转码为MP4格式并重新编码为h264\n", color_obj.YELLOW)
                 ffmpeg_command = [
                     "ffmpeg", "-i", converts_file_path,
                     "-c:v", "libx264",
@@ -231,7 +231,7 @@ def converts_mp4(converts_file_path: str, is_original_delete: bool = True) -> No
                     "-f", "mp4", converts_file_path.rsplit('.', maxsplit=1)[0] + ".mp4",
                 ]
             else:
-                color_obj.print_colored(f"正在转码为MP4格式\n", color_obj.YELLOW)
+                color_obj.print_colored("正在转码为MP4格式\n", color_obj.YELLOW)
                 ffmpeg_command = [
                     "ffmpeg", "-i", converts_file_path,
                     "-c:v", "copy",
@@ -340,6 +340,7 @@ def push_message(record_name: str, live_url: str, content: str) -> None:
         'NTFY': lambda: ntfy(
             ntfy_api, title=msg_title, content=content, tags=ntfy_tags, action_url=live_url, email=ntfy_email
         ),
+        'PUSHPLUS': lambda: pushplus(pushplus_token, msg_title, content),
     }
 
     for platform, func in push_functions.items():
@@ -366,7 +367,7 @@ def run_script(command: str) -> None:
             print(stderr_decoded)
     except PermissionError as e:
         logger.error(e)
-        logger.error(f'脚本无执行权限!, 若是Linux环境, 请先执行:chmod +x your_script.sh 授予脚本可执行权限')
+        logger.error('脚本无执行权限!, 若是Linux环境, 请先执行:chmod +x your_script.sh 授予脚本可执行权限')
     except OSError as e:
         logger.error(e)
         logger.error('Please add `#!/bin/bash` at the beginning of your bash script file.')
@@ -381,6 +382,41 @@ def clear_record_info(record_name: str, record_url: str) -> None:
     color_obj.print_colored(f"[{record_name}]已经从录制列表中移除\n", color_obj.YELLOW)
 
 
+def direct_download_stream(source_url: str, save_path: str, record_name: str, live_url: str, platform: str) -> bool:
+    try:
+        with open(save_path, 'wb') as f:
+            client = httpx.Client(timeout=None)
+
+            headers = {}
+            header_params = get_record_headers(platform, live_url)
+            if header_params:
+                key, value = header_params.split(":", 1)
+                headers[key] = value
+
+            with client.stream('GET', source_url, headers=headers, follow_redirects=True) as response:
+                if response.status_code != 200:
+                    logger.error(f"请求直播流失败,状态码: {response.status_code}")
+                    return False
+
+                downloaded = 0
+                chunk_size = 1024 * 16
+
+                for chunk in response.iter_bytes(chunk_size):
+                    if live_url in url_comments or exit_recording:
+                        color_obj.print_colored(f"[{record_name}]录制时已被注释或请求停止,下载中断", color_obj.YELLOW)
+                        clear_record_info(record_name, live_url)
+                        return False
+
+                    if chunk:
+                        f.write(chunk)
+                        downloaded += len(chunk)
+        print()
+        return True
+    except Exception as e:
+        logger.error(f"FLV下载错误: {e} 发生错误的行数: {e.__traceback__.tb_lineno}")
+        return False
+
+
 def check_subprocess(record_name: str, record_url: str, ffmpeg_command: list, save_type: str,
                      script_command: str | None = None) -> bool:
     save_file_path = ffmpeg_command[-1]
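The direct_download_stream helper added above wraps httpx's streaming API. For orientation, here is a minimal, self-contained sketch of the same chunked-download pattern; the URL and file name are placeholders, not values from the repository:

```python
# Minimal sketch of the chunked httpx download pattern used by
# direct_download_stream; URL and path below are illustrative only.
import httpx

def stream_to_file(source_url: str, save_path: str, chunk_size: int = 1024 * 16) -> bool:
    with httpx.Client(timeout=None) as client, open(save_path, "wb") as f:
        # Stream the body instead of buffering an unbounded live stream in memory.
        with client.stream("GET", source_url, follow_redirects=True) as response:
            if response.status_code != 200:
                return False
            for chunk in response.iter_bytes(chunk_size):
                if chunk:
                    f.write(chunk)
    return True

# Example (hypothetical URL):
# stream_to_file("https://example.com/live/stream.flv", "stream.flv")
```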
@@ -432,7 +468,7 @@ def check_subprocess(record_name: str, record_url: str, ffmpeg_command: list, sa
     params = [
         f'--record_name "{record_name}"',
         f'--save_file_path "{save_file_path}"',
-        f'--save_type {save_type}'
+        f'--save_type {save_type}',
         f'--split_video_by_time {split_video_by_time}',
         f'--converts_to_mp4 {converts_to_mp4}',
     ]
@@ -475,6 +511,37 @@ def get_quality_code(qn):
     return QUALITY_MAPPING.get(qn)
 
 
+def get_record_headers(platform, live_url):
+    live_domain = '/'.join(live_url.split('/')[0:3])
+    record_headers = {
+        'PandaTV': 'origin:https://www.pandalive.co.kr',
+        'WinkTV': 'origin:https://www.winktv.co.kr',
+        'PopkonTV': 'origin:https://www.popkontv.com',
+        'FlexTV': 'origin:https://www.flextv.co.kr',
+        '千度热播': 'referer:https://qiandurebo.com',
+        '17Live': 'referer:https://17.live/en/live/6302408',
+        '浪Live': 'referer:https://www.lang.live',
+        'shopee': f'origin:{live_domain}',
+        'Blued直播': 'referer:https://app.blued.cn'
+    }
+    return record_headers.get(platform)
+
+
+def is_flv_preferred_platform(link):
+    return any(i in link for i in ["douyin", "tiktok"])
+
+
+def select_source_url(link, stream_info):
+    if is_flv_preferred_platform(link):
+        codec = utils.get_query_params(stream_info.get('flv_url'), "codec")
+        if codec and codec[0] == 'h265':
+            logger.warning("FLV is not supported for h265 codec, use HLS source instead")
+        else:
+            return stream_info.get('flv_url')
+
+    return stream_info.get('record_url')
+
+
 def start_record(url_data: tuple, count_variable: int = -1) -> None:
     global error_count
 
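For readers skimming the new helpers: select_source_url prefers the FLV source on Douyin/TikTok links but falls back to the HLS record URL when the FLV URL advertises an H.265 codec. A standalone sketch of that decision, with urllib.parse standing in for the project's utils.get_query_params (assumed here to read a query parameter) and placeholder URLs:

```python
# Standalone sketch of the FLV/H.265 fallback logic added above; URLs are
# placeholders and parse_qs stands in for utils.get_query_params.
from urllib.parse import parse_qs, urlparse

def pick_source(link: str, stream_info: dict) -> str | None:
    flv_preferred = any(p in link for p in ("douyin", "tiktok"))
    flv_url = stream_info.get("flv_url")
    if flv_preferred and flv_url:
        codec = parse_qs(urlparse(flv_url).query).get("codec")
        if not (codec and codec[0] == "h265"):
            return flv_url  # FLV is fine for non-H.265 streams
        # H.265 cannot be carried in FLV here, so fall through to the HLS URL.
    return stream_info.get("record_url")

info = {"flv_url": "https://example.com/pull.flv?codec=h265",
        "record_url": "https://example.com/pull.m3u8"}
print(pick_source("https://live.douyin.com/123456", info))  # -> the .m3u8 fallback
```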
@@ -514,7 +581,7 @@ def start_record(url_data: tuple, count_variable: int = -1) -> None:
         platform = '抖音直播'
         with semaphore:
             if 'v.douyin.com' not in record_url and '/user/' not in record_url:
-                json_data = asyncio.run(spider.get_douyin_stream_data(
+                json_data = asyncio.run(spider.get_douyin_web_stream_data(
                     url=record_url,
                     proxy_addr=proxy_address,
                     cookies=dy_cookie))
@@ -523,7 +590,8 @@ def start_record(url_data: tuple, count_variable: int = -1) -> None:
                     url=record_url,
                     proxy_addr=proxy_address,
                     cookies=dy_cookie))
-            port_info = asyncio.run(stream.get_douyin_stream_url(json_data, record_quality))
+            port_info = asyncio.run(
+                stream.get_douyin_stream_url(json_data, record_quality, proxy_address))
 
     elif record_url.find("https://www.tiktok.com/") > -1:
         platform = 'TikTok直播'
@@ -533,7 +601,8 @@ def start_record(url_data: tuple, count_variable: int = -1) -> None:
                     url=record_url,
                     proxy_addr=proxy_address,
                     cookies=tiktok_cookie))
-                port_info = asyncio.run(stream.get_tiktok_stream_url(json_data, record_quality))
+                port_info = asyncio.run(
+                    stream.get_tiktok_stream_url(json_data, record_quality, proxy_address))
             else:
                 logger.error("错误信息: 网络异常,请检查网络是否能正常访问TikTok平台")
 
@@ -594,7 +663,7 @@ def start_record(url_data: tuple, count_variable: int = -1) -> None:
                     record_url, proxy_addr=proxy_address, cookies=xhs_cookie))
                 retry += 1
 
-    elif record_url.find("https://www.bigo.tv/") > -1 or record_url.find("slink.bigovideo.tv/") > -1:
+    elif record_url.find("www.bigo.tv/") > -1 or record_url.find("slink.bigovideo.tv/") > -1:
         platform = 'Bigo直播'
         with semaphore:
             port_info = asyncio.run(spider.get_bigo_stream_url(
@@ -606,7 +675,7 @@ def start_record(url_data: tuple, count_variable: int = -1) -> None:
             port_info = asyncio.run(spider.get_blued_stream_url(
                 record_url, proxy_addr=proxy_address, cookies=blued_cookie))
 
-    elif record_url.find("sooplive.co.kr/") > -1:
+    elif record_url.find("sooplive.co.kr/") > -1 or record_url.find("sooplive.com/") > -1:
         platform = 'SOOP'
         with semaphore:
             if global_proxy or proxy_address:
@@ -668,7 +737,7 @@ def start_record(url_data: tuple, count_variable: int = -1) -> None:
             else:
                 logger.error("错误信息: 网络异常,请检查本网络是否能正常访问WinkTV直播平台")
 
-    elif record_url.find("www.flextv.co.kr/") > -1:
+    elif record_url.find("www.flextv.co.kr/") > -1 or record_url.find("www.ttinglive.com/") > -1:
         platform = 'FlexTV'
         with semaphore:
             if global_proxy or proxy_address:
@@ -683,7 +752,10 @@ def start_record(url_data: tuple, count_variable: int = -1) -> None:
                     utils.update_config(
                         config_file, 'Cookie', 'flextv_cookie', json_data['new_cookies']
                     )
-                port_info = asyncio.run(stream.get_stream_url(json_data, record_quality, spec=True))
+                if 'play_url_list' in json_data:
+                    port_info = asyncio.run(stream.get_stream_url(json_data, record_quality, spec=True))
+                else:
+                    port_info = json_data
             else:
                 logger.error("错误信息: 网络异常,请检查本网络是否能正常访问FlexTV直播平台")
 
@@ -718,7 +790,7 @@ def start_record(url_data: tuple, count_variable: int = -1) -> None:
     elif record_url.find("twitcasting.tv/") > -1:
         platform = 'TwitCasting'
         with semaphore:
-            port_info = asyncio.run(spider.get_twitcasting_stream_url(
+            json_data = asyncio.run(spider.get_twitcasting_stream_url(
                 url=record_url,
                 proxy_addr=proxy_address,
                 cookies=twitcasting_cookie,
@@ -726,6 +798,8 @@ def start_record(url_data: tuple, count_variable: int = -1) -> None:
                 username=twitcasting_username,
                 password=twitcasting_password
             ))
+            port_info = asyncio.run(stream.get_stream_url(json_data, record_quality, spec=False))
+
             if port_info and port_info.get('new_cookies'):
                 utils.update_config(
                     file_path=config_file, section='Cookie', key='twitcasting_cookie',
@@ -925,6 +999,30 @@ def start_record(url_data: tuple, count_variable: int = -1) -> None:
             else:
                 logger.error("错误信息: 网络异常,请检查本网络是否能正常访问faceit直播平台")
 
+    elif record_url.find("www.miguvideo.com") > -1 or record_url.find("m.miguvideo.com") > -1:
+        platform = '咪咕直播'
+        with semaphore:
+            port_info = asyncio.run(spider.get_migu_stream_url(
+                url=record_url, proxy_addr=proxy_address, cookies=migu_cookie))
+
+    elif record_url.find("show.lailianjie.com") > -1:
+        platform = '连接直播'
+        with semaphore:
+            port_info = asyncio.run(spider.get_lianjie_stream_url(
+                url=record_url, proxy_addr=proxy_address, cookies=lianjie_cookie))
+
+    elif record_url.find("www.imkktv.com") > -1:
+        platform = '来秀直播'
+        with semaphore:
+            port_info = asyncio.run(spider.get_laixiu_stream_url(
+                url=record_url, proxy_addr=proxy_address, cookies=laixiu_cookie))
+
+    elif record_url.find("www.picarto.tv") > -1:
+        platform = 'Picarto'
+        with semaphore:
+            port_info = asyncio.run(spider.get_picarto_stream_url(
+                url=record_url, proxy_addr=proxy_address, cookies=picarto_cookie))
+
     elif record_url.find(".m3u8") > -1 or record_url.find(".flv") > -1:
         platform = '自定义录制直播'
         port_info = {
@@ -1016,7 +1114,7 @@ def start_record(url_data: tuple, count_variable: int = -1) -> None:
             time.sleep(push_check_seconds)
             continue
 
-        real_url = port_info.get('record_url')
+        real_url = select_source_url(record_url, port_info)
         full_path = f'{default_path}/{platform}'
         if real_url:
             now = datetime.datetime.today().strftime("%Y-%m-%d_%H-%M-%S")
@@ -1052,7 +1150,7 @@ def start_record(url_data: tuple, count_variable: int = -1) -> None:
             if enable_https_recording and real_url.startswith("http://"):
                 real_url = real_url.replace("http://", "https://")
 
-            http_record_list = ['shopee']
+            http_record_list = ['shopee', "migu"]
             if platform in http_record_list:
                 real_url = real_url.replace("https://", "http://")
 
@@ -1096,19 +1194,7 @@ def start_record(url_data: tuple, count_variable: int = -1) -> None:
                 "-avoid_negative_ts", "1"
             ]
 
-            record_headers = {
-                'PandaTV': 'origin:https://www.pandalive.co.kr',
-                'WinkTV': 'origin:https://www.winktv.co.kr',
-                'PopkonTV': 'origin:https://www.popkontv.com',
-                'FlexTV': 'origin:https://www.flextv.co.kr',
-                '千度热播': 'referer:https://qiandurebo.com',
-                '17Live': 'referer:https://17.live/en/live/6302408',
-                '浪Live': 'referer:https://www.lang.live',
-                'shopee': f'origin:{live_domain}',
-                'Blued直播': 'referer:https://app.blued.cn'
-            }
-
-            headers = record_headers.get(platform)
+            headers = get_record_headers(platform, record_url)
             if headers:
                 ffmpeg_command.insert(11, "-headers")
                 ffmpeg_command.insert(12, headers)
@@ -1124,7 +1210,8 @@ def start_record(url_data: tuple, count_variable: int = -1) -> None:
             if show_url:
                 re_plat = ('WinkTV', 'PandaTV', 'ShowRoom', 'CHZZK', 'Youtube')
                 if platform in re_plat:
-                    logger.info(f"{platform} | {anchor_name} | 直播源地址: {port_info['m3u8_url']}")
+                    logger.info(
+                        f"{platform} | {anchor_name} | 直播源地址: {port_info.get('m3u8_url')}")
                 else:
                     logger.info(
                         f"{platform} | {anchor_name} | 直播源地址: {real_url}")
@@ -1140,10 +1227,18 @@ def start_record(url_data: tuple, count_variable: int = -1) -> None:
             if platform in only_audio_platform_list:
                 only_audio_record = True
 
-            if only_audio_record:
+            record_save_type = video_save_type
+
+            if is_flv_preferred_platform(record_url) and port_info.get('flv_url'):
+                codec = utils.get_query_params(port_info['flv_url'], "codec")
+                if codec and codec[0] == 'h265':
+                    logger.warning("FLV is not supported for h265 codec, use TS format instead")
+                    record_save_type = "TS"
+
+            if only_audio_record or any(i in record_save_type for i in ['MP3', 'M4A']):
                 try:
                     now = time.strftime("%Y-%m-%d_%H-%M-%S", time.localtime())
-                    extension = "mp3" if "m4a" not in video_save_type.lower () else "m4a"
+                    extension = "mp3" if "m4a" not in record_save_type.lower() else "m4a"
                     name_format = "_%03d" if split_video_by_time else ""
                     save_file_path = (f"{full_path}/{anchor_name}_{title_in_name}{now}"
                                       f"{name_format}.{extension}")
@@ -1151,7 +1246,7 @@ def start_record(url_data: tuple, count_variable: int = -1) -> None:
                     if split_video_by_time:
                         print(f'\r{anchor_name} 准备开始录制音频: {save_file_path}')
 
-                        if "MP3" in video_save_type:
+                        if "MP3" in record_save_type:
                             command = [
                                 "-map", "0:a",
                                 "-c:a", "libmp3lame",
@@ -1175,7 +1270,7 @@ def start_record(url_data: tuple, count_variable: int = -1) -> None:
                             ]
 
                     else:
-                        if "MP3" in video_save_type:
+                        if "MP3" in record_save_type:
                             command = [
                                 "-map", "0:a",
                                 "-c:a", "libmp3lame",
@@ -1198,7 +1293,7 @@ def start_record(url_data: tuple, count_variable: int = -1) -> None:
                         record_name,
                         record_url,
                         ffmpeg_command,
-                        video_save_type,
+                        record_save_type,
                         custom_script
                     )
                     if comment_end:
@@ -1210,7 +1305,8 @@ def start_record(url_data: tuple, count_variable: int = -1) -> None:
                         error_count += 1
                         error_window.append(1)
 
-            if video_save_type == "FLV" or only_flv_record:
+            if only_flv_record:
+                logger.info(f"Use Direct Downloader to Download FLV Stream: {record_url}")
                 filename = anchor_name + f'_{title_in_name}' + now + '.flv'
                 save_file_path = f'{full_path}/{filename}'
                 print(f'{rec_info}/{filename}')
@@ -1227,11 +1323,20 @@ def start_record(url_data: tuple, count_variable: int = -1) -> None:
                 try:
                     flv_url = port_info.get('flv_url')
                     if flv_url:
-                        _filepath, _ = urllib.request.urlretrieve(flv_url, save_file_path)
-                        record_finished = True
+                        recording.add(record_name)
+                        start_record_time = datetime.datetime.now()
+                        recording_time_list[record_name] = [start_record_time, record_quality_zh]
+
+                        download_success = direct_download_stream(
+                            flv_url, save_file_path, record_name, record_url, platform
+                        )
+
+                        if download_success:
+                            record_finished = True
+                            print(
+                                f"\n{anchor_name} {time.strftime('%Y-%m-%d %H:%M:%S')} 直播录制完成\n")
+
                         recording.discard(record_name)
-                        print(
-                            f"\n{anchor_name} {time.strftime('%Y-%m-%d %H:%M:%S')} 直播录制完成\n")
                     else:
                         logger.debug("未找到FLV直播流,跳过录制")
                 except Exception as e:
@@ -1244,6 +1349,54 @@ def start_record(url_data: tuple, count_variable: int = -1) -> None:
                         error_count += 1
                         error_window.append(1)
 
+            elif record_save_type == "FLV":
+                filename = anchor_name + f'_{title_in_name}' + now + ".flv"
+                print(f'{rec_info}/{filename}')
+                save_file_path = full_path + '/' + filename
+
+                try:
+                    if split_video_by_time:
+                        now = time.strftime("%Y-%m-%d_%H-%M-%S", time.localtime())
+                        save_file_path = f"{full_path}/{anchor_name}_{title_in_name}{now}_%03d.flv"
+                        command = [
+                            "-map", "0",
+                            "-c:v", "copy",
+                            "-c:a", "copy",
+                            "-bsf:a", "aac_adtstoasc",
+                            "-f", "segment",
+                            "-segment_time", split_time,
+                            "-segment_format", "flv",
+                            "-reset_timestamps", "1",
+                            save_file_path
+                        ]
+
+                    else:
+                        command = [
+                            "-map", "0",
+                            "-c:v", "copy",
+                            "-c:a", "copy",
+                            "-bsf:a", "aac_adtstoasc",
+                            "-f", "flv",
+                            "{path}".format(path=save_file_path),
+                        ]
+                    ffmpeg_command.extend(command)
+
+                    comment_end = check_subprocess(
+                        record_name,
+                        record_url,
+                        ffmpeg_command,
+                        record_save_type,
+                        custom_script
+                    )
+                    if comment_end:
+                        return
+
+                except subprocess.CalledProcessError as e:
+                    logger.error(f"错误信息: {e} 发生错误的行数: {e.__traceback__.tb_lineno}")
+                    with max_request_lock:
+                        error_count += 1
+                        error_window.append(1)
+
                 try:
                     if converts_to_mp4:
                         seg_file_path = f"{full_path}/{anchor_name}_{title_in_name}{now}_%03d.mp4"
@@ -1270,7 +1423,7 @@ def start_record(url_data: tuple, count_variable: int = -1) -> None:
                 except Exception as e:
                     logger.error(f"转码失败: {e} ")
 
-            elif video_save_type == "MKV":
+            elif record_save_type == "MKV":
                 filename = anchor_name + f'_{title_in_name}' + now + ".mkv"
                 print(f'{rec_info}/{filename}')
                 save_file_path = full_path + '/' + filename
@@ -1306,7 +1459,7 @@ def start_record(url_data: tuple, count_variable: int = -1) -> None:
                         record_name,
                         record_url,
                         ffmpeg_command,
-                        video_save_type,
+                        record_save_type,
                         custom_script
                     )
                     if comment_end:
@@ -1318,7 +1471,7 @@ def start_record(url_data: tuple, count_variable: int = -1) -> None:
                         error_count += 1
                         error_window.append(1)
 
-            elif video_save_type == "MP4":
+            elif record_save_type == "MP4":
                 filename = anchor_name + f'_{title_in_name}' + now + ".mp4"
                 print(f'{rec_info}/{filename}')
                 save_file_path = full_path + '/' + filename
@@ -1353,7 +1506,7 @@ def start_record(url_data: tuple, count_variable: int = -1) -> None:
                         record_name,
                         record_url,
                         ffmpeg_command,
-                        video_save_type,
+                        record_save_type,
                         custom_script
                     )
                     if comment_end:
@@ -1389,7 +1542,7 @@ def start_record(url_data: tuple, count_variable: int = -1) -> None:
                         record_name,
                         record_url,
                         ffmpeg_command,
-                        video_save_type,
+                        record_save_type,
                         custom_script
                     )
                     if comment_end:
@@ -1433,7 +1586,7 @@ def start_record(url_data: tuple, count_variable: int = -1) -> None:
                         record_name,
                         record_url,
                         ffmpeg_command,
-                        video_save_type,
+                        record_save_type,
                         custom_script
                     )
                     if comment_end:
@@ -1621,8 +1774,8 @@ try:
     print("System Proxy: http://{}:{}".format(proxy_info.ip, proxy_info.port))
 except HTTPError as err:
     print(f"HTTP error occurred: {err.code} - {err.reason}")
-except URLError as err:
-    color_obj.print_colored(f"INFO:未检测到全局/规则网络代理,请检查代理配置(若无需录制海外直播请忽略此条提示)",
+except URLError:
+    color_obj.print_colored("INFO:未检测到全局/规则网络代理,请检查代理配置(若无需录制海外直播请忽略此条提示)",
                             color_obj.YELLOW)
 except Exception as err:
     print("An unexpected error occurred:", err)
@@ -1701,6 +1854,7 @@ while True:
     ntfy_api = read_config_value(config, '推送配置', 'ntfy推送地址', "")
     ntfy_tags = read_config_value(config, '推送配置', 'ntfy推送标签', "tada")
     ntfy_email = read_config_value(config, '推送配置', 'ntfy推送邮箱', "")
+    pushplus_token = read_config_value(config, '推送配置', 'pushplus推送token', "")
     push_message_title = read_config_value(config, '推送配置', '自定义推送标题', "直播间状态更新通知")
     begin_push_message_text = read_config_value(config, '推送配置', '自定义开播推送内容', "")
     over_push_message_text = read_config_value(config, '推送配置', '自定义关播推送内容', "")
@@ -1765,8 +1919,12 @@ while True:
     taobao_cookie = read_config_value(config, 'Cookie', 'taobao_cookie', '')
     jd_cookie = read_config_value(config, 'Cookie', 'jd_cookie', '')
     faceit_cookie = read_config_value(config, 'Cookie', 'faceit_cookie', '')
+    migu_cookie = read_config_value(config, 'Cookie', 'migu_cookie', '')
+    lianjie_cookie = read_config_value(config, 'Cookie', 'lianjie_cookie', '')
+    laixiu_cookie = read_config_value(config, 'Cookie', 'laixiu_cookie', '')
+    picarto_cookie = read_config_value(config, 'Cookie', 'picarto_cookie', '')
 
-    video_save_type_list = ("FLV", "MKV", "TS", "MP4", "MP3音频", "M4A音频")
+    video_save_type_list = ("FLV", "MKV", "TS", "MP4", "MP3音频", "M4A音频", "MP3", "M4A")
     if video_save_type and video_save_type.upper() in video_save_type_list:
         video_save_type = video_save_type.upper()
     else:
@@ -1794,7 +1952,7 @@ while True:
         delete_line(url_config_file, origin_line)
         line_list.append(origin_line)
         line = origin_line.strip()
-        if len(line) < 20:
+        if len(line) < 18:
             continue
 
         line_spilt = line.split('主播: ')
@@ -1882,15 +2040,23 @@ while True:
         'e.tb.cn',
         'huodong.m.taobao.com',
         '3.cn',
-        'eco.m.jd.com'
+        'eco.m.jd.com',
+        'www.miguvideo.com',
+        'm.miguvideo.com',
+        'show.lailianjie.com',
+        'www.imkktv.com',
+        'www.picarto.tv'
     ]
     overseas_platform_host = [
         'www.tiktok.com',
         'play.sooplive.co.kr',
         'm.sooplive.co.kr',
+        'www.sooplive.com',
+        'm.sooplive.com',
         'www.pandalive.co.kr',
         'www.winktv.co.kr',
         'www.flextv.co.kr',
+        'www.ttinglive.com',
         'www.popkontv.com',
         'www.twitch.tv',
         'www.liveme.com',
msg_push.py (42 changes)
@@ -213,6 +213,42 @@ def ntfy(api: str, title: str = "message", content: str = 'test', tags: str = 't
     return {"success": success, "error": error}
 
 
+def pushplus(token: str, title: str, content: str) -> Dict[str, Any]:
+    """
+    PushPlus推送通知
+    API文档: https://www.pushplus.plus/doc/
+    """
+    success = []
+    error = []
+    token_list = token.replace(',', ',').split(',') if token.strip() else []
+
+    for _token in token_list:
+        json_data = {
+            'token': _token,
+            'title': title,
+            'content': content
+        }
+
+        try:
+            url = 'https://www.pushplus.plus/send'
+            data = json.dumps(json_data).encode('utf-8')
+            req = urllib.request.Request(url, data=data, headers=headers)
+            response = opener.open(req, timeout=10)
+            json_str = response.read().decode('utf-8')
+            json_data = json.loads(json_str)
+
+            if json_data.get('code') == 200:
+                success.append(_token)
+            else:
+                error.append(_token)
+                print(f'PushPlus推送失败, Token:{_token}, 失败信息:{json_data.get("msg", "未知错误")}')
+        except Exception as e:
+            error.append(_token)
+            print(f'PushPlus推送失败, Token:{_token}, 错误信息:{e}')
+
+    return {"success": success, "error": error}
+
+
 if __name__ == '__main__':
     send_title = '直播通知' # 标题
     send_content = '张三 开播了!' # 推送内容

@@ -252,4 +288,8 @@ if __name__ == '__main__':
         api="https://ntfy.sh/xxxxx",
         title="直播推送",
         content="xxx已开播",
     )
+
+    # PushPlus推送通知
+    pushplus_token = ''  # 替换成自己的PushPlus Token,获取地址:https://www.pushplus.plus/
+    # pushplus(pushplus_token, send_title, send_content)
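A minimal usage sketch of the new pushplus helper added above. It mirrors the commented example in the module's __main__ block; the import path assumes msg_push.py is importable from the project root, and the token string is a placeholder, not a real PushPlus token.

# Sketch: send one notification through the new msg_push.pushplus() helper.
from msg_push import pushplus

# Comma-separated tokens (Chinese or ASCII commas) are each pushed in turn.
result = pushplus('your-token-1,your-token-2', '直播通知', '张三 开播了!')
print(result)  # {"success": [tokens that succeeded], "error": [tokens that failed]}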
pyproject.toml (new file, 23 lines)
@@ -0,0 +1,23 @@
[project]
name = "DouyinLiveRecorder"
version = "4.0.7"
description = "可循环值守和多人录制的直播录制软件, 支持抖音、TikTok、Youtube、快手、虎牙、斗鱼、B站、小红书、pandatv、sooplive、flextv、popkontv、twitcasting、winktv、百度、微博、酷狗、17Live、Twitch、Acfun、CHZZK、shopee等40+平台直播录制"
readme = "README.md"
authors = [{name = "Hmily"}]
license = { text = "MIT" }
requires-python = ">=3.10"
dependencies = [
    "requests>=2.31.0",
    "loguru>=0.7.3",
    "pycryptodome>=3.20.0",
    "distro>=1.9.0",
    "tqdm>=4.67.1",
    "httpx[http2]>=0.28.1",
    "PyExecJS>=1.5.1"
]

[project.urls]
"Homepage" = "https://github.com/ihmily/DouyinLiveRecorder"
"Documentation" = "https://github.com/ihmily/DouyinLiveRecorder"
"Repository" = "https://github.com/ihmily/DouyinLiveRecorder"
"Issues" = "https://github.com/ihmily/DouyinLiveRecorder/issues"
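A hedged sketch of reading the new project metadata programmatically; it only reflects the fields declared above and assumes Python 3.11+ for the standard-library tomllib parser (the project itself only requires 3.10).

# Sketch: inspect the metadata declared in the new pyproject.toml.
import tomllib  # stdlib in Python 3.11+

with open("pyproject.toml", "rb") as f:
    meta = tomllib.load(f)

print(meta["project"]["name"], meta["project"]["version"])  # DouyinLiveRecorder 4.0.7
print(meta["project"]["dependencies"])                      # pinned runtime dependencies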
src/ab_sign.py (new file, 454 lines)
@@ -0,0 +1,454 @@
# -*- encoding: utf-8 -*-
import math
import time


def rc4_encrypt(plaintext: str, key: str) -> str:
    # 初始化状态数组
    s = list(range(256))

    # 使用密钥对状态数组进行置换
    j = 0
    for i in range(256):
        j = (j + s[i] + ord(key[i % len(key)])) % 256
        s[i], s[j] = s[j], s[i]

    # 生成密钥流并加密
    i = j = 0
    result = []
    for char in plaintext:
        i = (i + 1) % 256
        j = (j + s[i]) % 256
        s[i], s[j] = s[j], s[i]
        t = (s[i] + s[j]) % 256
        result.append(chr(s[t] ^ ord(char)))

    return ''.join(result)


def left_rotate(x: int, n: int) -> int:
    n %= 32
    return ((x << n) | (x >> (32 - n))) & 0xFFFFFFFF


def get_t_j(j: int) -> int:
    if 0 <= j < 16:
        return 2043430169  # 0x79CC4519
    elif 16 <= j < 64:
        return 2055708042  # 0x7A879D8A
    else:
        raise ValueError("invalid j for constant Tj")


def ff_j(j: int, x: int, y: int, z: int) -> int:
    if 0 <= j < 16:
        return (x ^ y ^ z) & 0xFFFFFFFF
    elif 16 <= j < 64:
        return ((x & y) | (x & z) | (y & z)) & 0xFFFFFFFF
    else:
        raise ValueError("invalid j for bool function FF")


def gg_j(j: int, x: int, y: int, z: int) -> int:
    if 0 <= j < 16:
        return (x ^ y ^ z) & 0xFFFFFFFF
    elif 16 <= j < 64:
        return ((x & y) | (~x & z)) & 0xFFFFFFFF
    else:
        raise ValueError("invalid j for bool function GG")


class SM3:
    def __init__(self):
        self.reg = []
        self.chunk = []
        self.size = 0
        self.reset()

    def reset(self):
        # 初始化寄存器值 - 修正为与JS版本相同的值
        self.reg = [
            1937774191, 1226093241, 388252375, 3666478592,
            2842636476, 372324522, 3817729613, 2969243214
        ]
        self.chunk = []
        self.size = 0

    def write(self, data):
        # 将输入转换为字节数组
        if isinstance(data, str):
            # 直接转换为UTF-8字节列表
            a = list(data.encode('utf-8'))
        else:
            a = data

        self.size += len(a)
        f = 64 - len(self.chunk)

        if len(a) < f:
            # 如果数据长度小于剩余空间,直接添加
            self.chunk.extend(a)
        else:
            # 否则分块处理
            self.chunk.extend(a[:f])

            while len(self.chunk) >= 64:
                self._compress(self.chunk)
                if f < len(a):
                    self.chunk = a[f:min(f + 64, len(a))]
                else:
                    self.chunk = []
                f += 64

    def _fill(self):
        # 计算比特长度
        bit_length = 8 * self.size

        # 添加填充位
        padding_pos = len(self.chunk)
        self.chunk.append(0x80)
        padding_pos = (padding_pos + 1) % 64

        # 如果剩余空间不足8字节,则填充到下一个块
        if 64 - padding_pos < 8:
            padding_pos -= 64

        # 填充0直到剩余8字节用于存储长度
        while padding_pos < 56:
            self.chunk.append(0)
            padding_pos += 1

        # 添加消息长度(高32位)
        high_bits = bit_length // 4294967296
        for i in range(4):
            self.chunk.append((high_bits >> (8 * (3 - i))) & 0xFF)

        # 添加消息长度(低32位)
        for i in range(4):
            self.chunk.append((bit_length >> (8 * (3 - i))) & 0xFF)

    def _compress(self, data):
        if len(data) < 64:
            raise ValueError("compress error: not enough data")
        else:
            # 消息扩展
            w = [0] * 132

            # 将字节数组转换为字
            for t in range(16):
                w[t] = (data[4 * t] << 24) | (data[4 * t + 1] << 16) | (data[4 * t + 2] << 8) | data[4 * t + 3]
                w[t] &= 0xFFFFFFFF

            # 消息扩展
            for j in range(16, 68):
                a = w[j - 16] ^ w[j - 9] ^ left_rotate(w[j - 3], 15)
                a = a ^ left_rotate(a, 15) ^ left_rotate(a, 23)
                w[j] = (a ^ left_rotate(w[j - 13], 7) ^ w[j - 6]) & 0xFFFFFFFF

            # 计算w'
            for j in range(64):
                w[j + 68] = (w[j] ^ w[j + 4]) & 0xFFFFFFFF

            # 压缩
            a, b, c, d, e, f, g, h = self.reg

            for j in range(64):
                ss1 = left_rotate((left_rotate(a, 12) + e + left_rotate(get_t_j(j), j)) & 0xFFFFFFFF, 7)
                ss2 = ss1 ^ left_rotate(a, 12)
                tt1 = (ff_j(j, a, b, c) + d + ss2 + w[j + 68]) & 0xFFFFFFFF
                tt2 = (gg_j(j, e, f, g) + h + ss1 + w[j]) & 0xFFFFFFFF

                d = c
                c = left_rotate(b, 9)
                b = a
                a = tt1
                h = g
                g = left_rotate(f, 19)
                f = e
                e = (tt2 ^ left_rotate(tt2, 9) ^ left_rotate(tt2, 17)) & 0xFFFFFFFF

            # 更新寄存器
            self.reg[0] ^= a
            self.reg[1] ^= b
            self.reg[2] ^= c
            self.reg[3] ^= d
            self.reg[4] ^= e
            self.reg[5] ^= f
            self.reg[6] ^= g
            self.reg[7] ^= h

    def sum(self, data=None, output_format=None):
        """
        计算哈希值
        """
        # 如果提供了输入,则重置并写入
        if data is not None:
            self.reset()
            self.write(data)

        self._fill()

        # 分块压缩
        for f in range(0, len(self.chunk), 64):
            self._compress(self.chunk[f:f + 64])

        if output_format == 'hex':
            # 十六进制输出
            result = ''.join(f'{val:08x}' for val in self.reg)
        else:
            # 字节数组输出
            result = []
            for f in range(8):
                c = self.reg[f]
                result.append((c >> 24) & 0xFF)
                result.append((c >> 16) & 0xFF)
                result.append((c >> 8) & 0xFF)
                result.append(c & 0xFF)

        self.reset()
        return result


def result_encrypt(long_str: str, num: str | None = None) -> str:
    # 魔改base64编码表
    encoding_tables = {
        "s0": "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/=",
        "s1": "Dkdpgh4ZKsQB80/Mfvw36XI1R25+WUAlEi7NLboqYTOPuzmFjJnryx9HVGcaStCe=",
        "s2": "Dkdpgh4ZKsQB80/Mfvw36XI1R25-WUAlEi7NLboqYTOPuzmFjJnryx9HVGcaStCe=",
        "s3": "ckdp1h4ZKsUB80/Mfvw36XIgR25+WQAlEi7NLboqYTOPuzmFjJnryx9HVGDaStCe",
        "s4": "Dkdpgh2ZmsQB80/MfvV36XI1R45-WUAlEixNLwoqYTOPuzKFjJnry79HbGcaStCe"
    }

    # 位移常量
    masks = [16515072, 258048, 4032, 63]  # 对应 0, 1, 2 的掩码,添加63作为第四个掩码
    shifts = [18, 12, 6, 0]  # 对应的位移量

    encoding_table = encoding_tables[num]

    result = ""
    round_num = 0
    long_int = get_long_int(round_num, long_str)

    total_chars = math.ceil(len(long_str) / 3 * 4)

    for i in range(total_chars):
        # 每4个字符处理一组3字节
        if i // 4 != round_num:
            round_num += 1
            long_int = get_long_int(round_num, long_str)

        # 计算当前位置的索引
        index = i % 4

        # 使用掩码和位移提取6位值
        char_index = (long_int & masks[index]) >> shifts[index]

        result += encoding_table[char_index]

    return result


def get_long_int(round_num: int, long_str: str) -> int:
    round_num = round_num * 3

    # 获取字符串中的字符,如果超出范围则使用0
    char1 = ord(long_str[round_num]) if round_num < len(long_str) else 0
    char2 = ord(long_str[round_num + 1]) if round_num + 1 < len(long_str) else 0
    char3 = ord(long_str[round_num + 2]) if round_num + 2 < len(long_str) else 0

    return (char1 << 16) | (char2 << 8) | char3


def gener_random(random_num: int, option: list[int]) -> list[int]:
    byte1 = random_num & 255
    byte2 = (random_num >> 8) & 255

    return [
        (byte1 & 170) | (option[0] & 85),  # 偶数位与option[0]的奇数位合并
        (byte1 & 85) | (option[0] & 170),  # 奇数位与option[0]的偶数位合并
        (byte2 & 170) | (option[1] & 85),  # 偶数位与option[1]的奇数位合并
        (byte2 & 85) | (option[1] & 170),  # 奇数位与option[1]的偶数位合并
    ]


def generate_random_str() -> str:
    """
    生成随机字符串

    Returns:
        随机字符串
    """
    # 使用与JS版本相同的固定随机值
    random_values = [0.123456789, 0.987654321, 0.555555555]

    # 生成三组随机字节并合并
    random_bytes = []
    random_bytes.extend(gener_random(int(random_values[0] * 10000), [3, 45]))
    random_bytes.extend(gener_random(int(random_values[1] * 10000), [1, 0]))
    random_bytes.extend(gener_random(int(random_values[2] * 10000), [1, 5]))

    return ''.join(chr(b) for b in random_bytes)


def generate_rc4_bb_str(url_search_params: str, user_agent: str, window_env_str: str,
                        suffix: str = "cus", arguments: list[int] | None = None) -> str:
    if arguments is None:
        arguments = [0, 1, 14]

    sm3 = SM3()
    start_time = int(time.time() * 1000)

    # 三次加密处理
    # 1: url_search_params两次sm3之的结果
    url_search_params_list = sm3.sum(sm3.sum(url_search_params + suffix))
    # 2: 对后缀两次sm3之的结果
    cus = sm3.sum(sm3.sum(suffix))
    # 3: 对ua处理之后的结果
    ua_key = chr(0) + chr(1) + chr(14)  # [1/256, 1, 14]
    ua = sm3.sum(result_encrypt(
        rc4_encrypt(user_agent, ua_key),
        "s3"
    ))

    end_time = start_time + 100

    # 构建配置对象
    b = {
        8: 3,
        10: end_time,
        15: {
            "aid": 6383,
            "pageId": 110624,
            "boe": False,
            "ddrt": 7,
            "paths": {
                "include": [{} for _ in range(7)],
                "exclude": []
            },
            "track": {
                "mode": 0,
                "delay": 300,
                "paths": []
            },
            "dump": True,
            "rpU": "hwj"
        },
        16: start_time,
        18: 44,
        19: [1, 0, 1, 5],
    }

    def split_to_bytes(num: int) -> list[int]:
        return [
            (num >> 24) & 255,
            (num >> 16) & 255,
            (num >> 8) & 255,
            num & 255
        ]

    # 处理时间戳
    start_time_bytes = split_to_bytes(b[16])
    b[20] = start_time_bytes[0]
    b[21] = start_time_bytes[1]
    b[22] = start_time_bytes[2]
    b[23] = start_time_bytes[3]
    b[24] = int(b[16] / 256 / 256 / 256 / 256) & 255
    b[25] = int(b[16] / 256 / 256 / 256 / 256 / 256) & 255

    # 处理Arguments参数
    arg0_bytes = split_to_bytes(arguments[0])
    b[26] = arg0_bytes[0]
    b[27] = arg0_bytes[1]
    b[28] = arg0_bytes[2]
    b[29] = arg0_bytes[3]

    b[30] = int(arguments[1] / 256) & 255
    b[31] = (arguments[1] % 256) & 255

    arg1_bytes = split_to_bytes(arguments[1])
    b[32] = arg1_bytes[0]
    b[33] = arg1_bytes[1]

    arg2_bytes = split_to_bytes(arguments[2])
    b[34] = arg2_bytes[0]
    b[35] = arg2_bytes[1]
    b[36] = arg2_bytes[2]
    b[37] = arg2_bytes[3]

    # 处理加密结果
    b[38] = url_search_params_list[21]
    b[39] = url_search_params_list[22]
    b[40] = cus[21]
    b[41] = cus[22]
    b[42] = ua[23]
    b[43] = ua[24]

    # 处理结束时间
    end_time_bytes = split_to_bytes(b[10])
    b[44] = end_time_bytes[0]
    b[45] = end_time_bytes[1]
    b[46] = end_time_bytes[2]
    b[47] = end_time_bytes[3]
    b[48] = b[8]
    b[49] = int(b[10] / 256 / 256 / 256 / 256) & 255
    b[50] = int(b[10] / 256 / 256 / 256 / 256 / 256) & 255

    # 处理配置项
    b[51] = b[15]['pageId']

    page_id_bytes = split_to_bytes(b[15]['pageId'])
    b[52] = page_id_bytes[0]
    b[53] = page_id_bytes[1]
    b[54] = page_id_bytes[2]
    b[55] = page_id_bytes[3]

    b[56] = b[15]['aid']
    b[57] = b[15]['aid'] & 255
    b[58] = (b[15]['aid'] >> 8) & 255
    b[59] = (b[15]['aid'] >> 16) & 255
    b[60] = (b[15]['aid'] >> 24) & 255

    # 处理环境信息
    window_env_list = [ord(char) for char in window_env_str]
    b[64] = len(window_env_list)
    b[65] = b[64] & 255
    b[66] = (b[64] >> 8) & 255

    b[69] = 0
    b[70] = 0
    b[71] = 0

    # 计算校验和
    b[72] = b[18] ^ b[20] ^ b[26] ^ b[30] ^ b[38] ^ b[40] ^ b[42] ^ b[21] ^ b[27] ^ b[31] ^ \
            b[35] ^ b[39] ^ b[41] ^ b[43] ^ b[22] ^ b[28] ^ b[32] ^ b[36] ^ b[23] ^ b[29] ^ \
            b[33] ^ b[37] ^ b[44] ^ b[45] ^ b[46] ^ b[47] ^ b[48] ^ b[49] ^ b[50] ^ b[24] ^ \
            b[25] ^ b[52] ^ b[53] ^ b[54] ^ b[55] ^ b[57] ^ b[58] ^ b[59] ^ b[60] ^ b[65] ^ \
            b[66] ^ b[70] ^ b[71]

    # 构建最终字节数组
    bb = [
        b[18], b[20], b[52], b[26], b[30], b[34], b[58], b[38], b[40], b[53], b[42], b[21],
        b[27], b[54], b[55], b[31], b[35], b[57], b[39], b[41], b[43], b[22], b[28], b[32],
        b[60], b[36], b[23], b[29], b[33], b[37], b[44], b[45], b[59], b[46], b[47], b[48],
        b[49], b[50], b[24], b[25], b[65], b[66], b[70], b[71]
    ]
    bb.extend(window_env_list)
    bb.append(b[72])

    return rc4_encrypt(
        ''.join(chr(byte) for byte in bb),
        chr(121)
    )


def ab_sign(url_search_params: str, user_agent: str) -> str:
    window_env_str = "1920|1080|1920|1040|0|30|0|0|1872|92|1920|1040|1857|92|1|24|Win32"

    # 1. 生成随机字符串前缀
    # 2. 生成RC4加密的主体部分
    # 3. 对结果进行最终加密并添加等号后缀
    return result_encrypt(
        generate_random_str() +
        generate_rc4_bb_str(url_search_params, user_agent, window_env_str),
        "s4"
    ) + "="
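A hedged sketch of how the new ab_sign module is consumed: the spider.py changes further down sign the query string of the Douyin webcast "enter" API and append the result as the a_bogus parameter. The import path (src.ab_sign) and the web_rid value below are placeholders for illustration.

# Sketch: sign a Douyin webcast API request the way the new spider code does.
import urllib.parse
from src.ab_sign import ab_sign

ua = ('Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) '
      'Chrome/116.0.5845.97 Safari/537.36')
params = {"aid": "6383", "app_name": "douyin_web", "live_id": "1",
          "web_rid": "123456", "msToken": ""}  # web_rid is a placeholder room id
api = f'https://live.douyin.com/webcast/room/web/enter/?{urllib.parse.urlencode(params)}'
api += "&a_bogus=" + ab_sign(urllib.parse.urlparse(api).query, ua)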
@@ -2,7 +2,6 @@
 import gzip
 import urllib.parse
 import urllib.error
-from urllib.request import Request
 import requests
 import ssl
 import json
src/javascript/laixiu.js (new file, 33 lines)
@@ -0,0 +1,33 @@

function generateUUID() {
    return 'xxxxxxxx-xxxx-4xxx-yxxx-xxxxxxxxxxxx'.replace(/[xy]/g, function(c) {
        const r = Math.random() * 16 | 0, v = c === 'x' ? r : (r & 0x3 | 0x8);
        return v.toString(16);
    });
}

function calculateSign() {
    const a = new Date().getTime();
    const s = generateUUID().replace(/-/g, "");
    const u = 'kk792f28d6ff1f34ec702c08626d454b39pro';

    const input = "web" + s + a + u;

    const hash = CryptoJS.MD5(input).toString();

    return {
        timestamp: a,
        imei: s,
        requestId: hash,
        inputString: input
    };
}

function sign(cryptoJSPath) {
    CryptoJS = require(cryptoJSPath);
    return calculateSign();
}

module.exports = {
    sign
};
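A hedged sketch, not the project's confirmed call site: laixiu.js exports sign(cryptoJSPath), so it can be driven from Python through PyExecJS (which this commit adds to the dependencies) running on a Node.js backend. The script path and the crypto-js module path below are assumptions.

# Sketch: evaluate laixiu.js from Python and read the signed request fields.
import execjs

with open('src/javascript/laixiu.js', 'r', encoding='utf-8') as f:
    ctx = execjs.compile(f.read())  # assumes the PyExecJS runtime is Node, so require() works

params = ctx.call('sign', 'crypto-js')  # 'crypto-js' must be resolvable by Node's require()
print(params['timestamp'], params['imei'], params['requestId'])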
src/javascript/migu.js (new file, 143 lines)
@@ -0,0 +1,143 @@
/**
 * Function to get the ddCalcu parameter value
 * @param {string} inputUrl - The original URL before encryption
 * @returns {Promise<string>} - Returns the calculated ddCalcu value
 */
async function getDdCalcu(inputUrl) {
    let wasmInstance = null;
    let memory_p = null; // Uint8Array view
    let memory_h = null; // Uint32Array view

    // Fixed parameter
    const f = 'PBTxuWiTEbUPPFcpyxs0ww==';

    // Utility function: Convert string to UTF-8 in memory
    function stringToUTF8(string, offset) {
        const encoder = new TextEncoder();
        const encoded = encoder.encode(string);
        for (let i = 0; i < encoded.length; i++) {
            memory_p[offset + i] = encoded[i];
        }
        memory_p[offset + encoded.length] = 0; // Null-terminate
    }

    // Utility function: Read UTF-8 string from memory address
    function UTF8ToString(offset) {
        let s = '';
        let i = 0;
        while (memory_p[offset + i]) {
            s += String.fromCharCode(memory_p[offset + i]);
            i++;
        }
        return s;
    }

    // WASM import function stubs
    function a(e, t, r, n) {
        let s = 0;
        for (let i = 0; i < r; i++) {
            const d = memory_h[t + 4 >> 2];
            t += 8;
            s += d;
        }
        memory_h[n >> 2] = s;
        return 0;
    }

    function b() {}

    function c() {}

    // Step 1: Retrieve playerVersion
    const settingsResp = await fetch('https://app-sc.miguvideo.com/common/v1/settings/H5_DetailPage');
    const settingsData = await settingsResp.json();
    const playerVersion = JSON.parse(settingsData.body.paramValue).playerVersion;

    // Step 2: Load WASM module
    const wasmUrl = `https://www.miguvideo.com/mgs/player/prd/${playerVersion}/dist/mgprtcl.wasm`;
    const wasmResp = await fetch(wasmUrl);
    if (!wasmResp.ok) throw new Error("Failed to download WASM");
    const wasmBuffer = await wasmResp.arrayBuffer();

    const importObject = {
        a: { a, b, c }
    };

    const { instance } = await WebAssembly.instantiate(wasmBuffer, importObject);
    wasmInstance = instance;

    const memory = wasmInstance.exports.d;
    memory_p = new Uint8Array(memory.buffer);
    memory_h = new Uint32Array(memory.buffer);

    const exports = {
        CallInterface1: wasmInstance.exports.h,
        CallInterface2: wasmInstance.exports.i,
        CallInterface3: wasmInstance.exports.j,
        CallInterface4: wasmInstance.exports.k,
        CallInterface6: wasmInstance.exports.m,
        CallInterface7: wasmInstance.exports.n,
        CallInterface8: wasmInstance.exports.o,
        CallInterface9: wasmInstance.exports.p,
        CallInterface10: wasmInstance.exports.q,
        CallInterface11: wasmInstance.exports.r,
        CallInterface14: wasmInstance.exports.t,
        malloc: wasmInstance.exports.u,
    };

    const parsedUrl = new URL(inputUrl);
    const query = Object.fromEntries(parsedUrl.searchParams);

    const o = query.userid || '';
    const a_val = query.timestamp || '';
    const s = query.ProgramID || '';
    const u = query.Channel_ID || '';
    const v = query.puData || '';

    // Allocate memory
    const d = exports.malloc(o.length + 1);
    const h = exports.malloc(a_val.length + 1);
    const y = exports.malloc(s.length + 1);
    const m = exports.malloc(u.length + 1);
    const g = exports.malloc(v.length + 1);
    const b_val = exports.malloc(f.length + 1);
    const E = exports.malloc(128);
    const T = exports.malloc(128);

    // Write data to memory
    stringToUTF8(o, d);
    stringToUTF8(a_val, h);
    stringToUTF8(s, y);
    stringToUTF8(u, m);
    stringToUTF8(v, g);
    stringToUTF8(f, b_val);

    // Call interface functions
    const S = exports.CallInterface6(); // Create context
    exports.CallInterface1(S, y, s.length);
    exports.CallInterface10(S, h, a_val.length);
    exports.CallInterface9(S, d, o.length);
    exports.CallInterface3(S, 0, 0);
    exports.CallInterface11(S, 0, 0);
    exports.CallInterface8(S, g, v.length);
    exports.CallInterface2(S, m, u.length);
    exports.CallInterface14(S, b_val, f.length, T, 128);

    const w = UTF8ToString(T);
    const I = exports.malloc(w.length + 1);
    stringToUTF8(w, I);

    exports.CallInterface7(S, I, w.length);
    exports.CallInterface4(S, E, 128);

    return UTF8ToString(E);
}

const url = process.argv[2];

getDdCalcu(url).then(result => {
    console.log(result);
}).catch(err => {
    console.error(err);
    process.exit(1);
});
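A hedged sketch of one way to drive migu.js from Python (an assumption, not the confirmed call site in spider.py): the script reads the stream URL from process.argv[2] and prints the computed ddCalcu value, so it can be invoked with the subprocess import this commit adds to spider.py. It assumes a Node.js runtime with built-in fetch and WebAssembly support (Node 18+).

# Sketch: compute the Migu ddCalcu value by shelling out to the new JS helper.
import subprocess

def get_dd_calcu(play_url: str) -> str:
    proc = subprocess.run(
        ['node', 'src/javascript/migu.js', play_url],  # path assumed relative to the repo root
        capture_output=True, text=True, check=True, timeout=30
    )
    return proc.stdout.strip()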
@@ -2,10 +2,19 @@
 
 import os
 import sys
+from loguru import logger
+
+logger.remove()
+
 custom_format = "<green>{time:YYYY-MM-DD HH:mm:ss.SSS}</green> | <level>{level: <8}</level> - <level>{message}</level>"
-os.environ["LOGURU_FORMAT"] = custom_format
-from loguru import logger
+logger.add(
+    sink=sys.stderr,
+    format=custom_format,
+    level="DEBUG",
+    colorize=True,
+    enqueue=True
+)
+
 script_path = os.path.split(os.path.realpath(sys.argv[0]))[0]
 
501
src/spider.py
501
src/spider.py
@ -4,14 +4,16 @@
|
|||||||
Author: Hmily
|
Author: Hmily
|
||||||
GitHub: https://github.com/ihmily
|
GitHub: https://github.com/ihmily
|
||||||
Date: 2023-07-15 23:15:00
|
Date: 2023-07-15 23:15:00
|
||||||
Update: 2025-02-08 17:59:16
|
Update: 2025-10-23 18:28:00
|
||||||
Copyright (c) 2023-2025 by Hmily, All Rights Reserved.
|
Copyright (c) 2023-2025 by Hmily, All Rights Reserved.
|
||||||
Function: Get live stream data.
|
Function: Get live stream data.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
import hashlib
|
import hashlib
|
||||||
import random
|
import random
|
||||||
|
import subprocess
|
||||||
import time
|
import time
|
||||||
|
import uuid
|
||||||
from operator import itemgetter
|
from operator import itemgetter
|
||||||
import urllib.parse
|
import urllib.parse
|
||||||
import urllib.error
|
import urllib.error
|
||||||
@ -23,10 +25,11 @@ import json
|
|||||||
import execjs
|
import execjs
|
||||||
import urllib.request
|
import urllib.request
|
||||||
from . import JS_SCRIPT_PATH, utils
|
from . import JS_SCRIPT_PATH, utils
|
||||||
from .utils import trace_error_decorator
|
from .utils import trace_error_decorator, generate_random_string
|
||||||
from .logger import script_path
|
from .logger import script_path
|
||||||
from .room import get_sec_user_id, get_unique_id, UnsupportedUrlError
|
from .room import get_sec_user_id, get_unique_id, UnsupportedUrlError
|
||||||
from .http_clients.async_http import async_req, get_response_status
|
from .http_clients.async_http import async_req
|
||||||
|
from .ab_sign import ab_sign
|
||||||
|
|
||||||
|
|
||||||
ssl_context = ssl.create_default_context()
|
ssl_context = ssl.create_default_context()
|
||||||
@ -62,10 +65,87 @@ async def get_play_url_list(m3u8: str, proxy: OptionalStr = None, header: Option
|
|||||||
return play_url_list
|
return play_url_list
|
||||||
|
|
||||||
|
|
||||||
|
async def get_douyin_web_stream_data(url: str, proxy_addr: OptionalStr = None, cookies: OptionalStr = None):
|
||||||
|
headers = {
|
||||||
|
'cookie': 'ttwid=1%7C2iDIYVmjzMcpZ20fcaFde0VghXAA3NaNXE_SLR68IyE%7C1761045455'
|
||||||
|
'%7Cab35197d5cfb21df6cbb2fa7ef1c9262206b062c315b9d04da746d0b37dfbc7d',
|
||||||
|
'referer': 'https://live.douyin.com/335354047186',
|
||||||
|
'user-agent': 'Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) '
|
||||||
|
'Chrome/116.0.5845.97 Safari/537.36 Core/1.116.567.400 QQBrowser/19.7.6764.400',
|
||||||
|
}
|
||||||
|
if cookies:
|
||||||
|
headers['cookie'] = cookies
|
||||||
|
|
||||||
|
try:
|
||||||
|
web_rid = url.split('?')[0].split('live.douyin.com/')[-1]
|
||||||
|
params = {
|
||||||
|
"aid": "6383",
|
||||||
|
"app_name": "douyin_web",
|
||||||
|
"live_id": "1",
|
||||||
|
"device_platform": "web",
|
||||||
|
"language": "zh-CN",
|
||||||
|
"browser_language": "zh-CN",
|
||||||
|
"browser_platform": "Win32",
|
||||||
|
"browser_name": "Chrome",
|
||||||
|
"browser_version": "116.0.0.0",
|
||||||
|
"web_rid": web_rid,
|
||||||
|
'msToken': '',
|
||||||
|
}
|
||||||
|
|
||||||
|
api = f'https://live.douyin.com/webcast/room/web/enter/?{urllib.parse.urlencode(params)}'
|
||||||
|
a_bogus = ab_sign(urllib.parse.urlparse(api).query, headers['user-agent'])
|
||||||
|
api += "&a_bogus=" + a_bogus
|
||||||
|
try:
|
||||||
|
json_str = await async_req(url=api, proxy_addr=proxy_addr, headers=headers)
|
||||||
|
if not json_str:
|
||||||
|
raise Exception("it triggered risk control")
|
||||||
|
json_data = json.loads(json_str)['data']
|
||||||
|
if not json_data['data']:
|
||||||
|
raise Exception(f"{url} VR live is not supported")
|
||||||
|
room_data = json_data['data'][0]
|
||||||
|
room_data['anchor_name'] = json_data['user']['nickname']
|
||||||
|
except Exception as e:
|
||||||
|
raise Exception(f"Douyin web data fetch error, because {e}.")
|
||||||
|
|
||||||
|
if room_data['status'] == 2:
|
||||||
|
if 'stream_url' not in room_data:
|
||||||
|
raise RuntimeError(
|
||||||
|
"The live streaming type or gameplay is not supported on the computer side yet, please use the "
|
||||||
|
"app to share the link for recording."
|
||||||
|
)
|
||||||
|
live_core_sdk_data = room_data['stream_url']['live_core_sdk_data']
|
||||||
|
pull_datas = room_data['stream_url']['pull_datas']
|
||||||
|
if live_core_sdk_data:
|
||||||
|
if pull_datas:
|
||||||
|
key = list(pull_datas.keys())[0]
|
||||||
|
json_str = pull_datas[key]['stream_data']
|
||||||
|
else:
|
||||||
|
json_str = live_core_sdk_data['pull_data']['stream_data']
|
||||||
|
json_data = json.loads(json_str)
|
||||||
|
if 'origin' in json_data['data']:
|
||||||
|
stream_data = live_core_sdk_data['pull_data']['stream_data']
|
||||||
|
origin_data = json.loads(stream_data)['data']['origin']['main']
|
||||||
|
sdk_params = json.loads(origin_data['sdk_params'])
|
||||||
|
origin_hls_codec = sdk_params.get('VCodec') or ''
|
||||||
|
|
||||||
|
origin_url_list = json_data['data']['origin']['main']
|
||||||
|
origin_m3u8 = {'ORIGIN': origin_url_list["hls"] + '&codec=' + origin_hls_codec}
|
||||||
|
origin_flv = {'ORIGIN': origin_url_list["flv"] + '&codec=' + origin_hls_codec}
|
||||||
|
hls_pull_url_map = room_data['stream_url']['hls_pull_url_map']
|
||||||
|
flv_pull_url = room_data['stream_url']['flv_pull_url']
|
||||||
|
room_data['stream_url']['hls_pull_url_map'] = {**origin_m3u8, **hls_pull_url_map}
|
||||||
|
room_data['stream_url']['flv_pull_url'] = {**origin_flv, **flv_pull_url}
|
||||||
|
except Exception as e:
|
||||||
|
print(f"Error message: {e} Error line: {e.__traceback__.tb_lineno}")
|
||||||
|
room_data = {'anchor_name': ""}
|
||||||
|
return room_data
|
||||||
|
|
||||||
|
|
||||||
@trace_error_decorator
|
@trace_error_decorator
|
||||||
async def get_douyin_app_stream_data(url: str, proxy_addr: OptionalStr = None, cookies: OptionalStr = None) -> dict:
|
async def get_douyin_app_stream_data(url: str, proxy_addr: OptionalStr = None, cookies: OptionalStr = None) -> dict:
|
||||||
headers = {
|
headers = {
|
||||||
'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:109.0) Gecko/20100101 Firefox/115.0',
|
'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) '
|
||||||
|
'Chrome/141.0.0.0 Safari/537.36 Edg/141.0.0.0',
|
||||||
'Accept-Language': 'zh-CN,zh;q=0.8,zh-TW;q=0.7,zh-HK;q=0.5,en-US;q=0.3,en;q=0.2',
|
'Accept-Language': 'zh-CN,zh;q=0.8,zh-TW;q=0.7,zh-HK;q=0.5,en-US;q=0.3,en;q=0.2',
|
||||||
'Referer': 'https://live.douyin.com/',
|
'Referer': 'https://live.douyin.com/',
|
||||||
'Cookie': 'ttwid=1%7CB1qls3GdnZhUov9o2NxOMxxYS2ff6OSvEWbv0ytbES4%7C1680522049%7C280d802d6d478e3e78d0c807f7c487e7ffec0ae4e5fdd6a0fe74c3c6af149511; my_rd=1; passport_csrf_token=3ab34460fa656183fccfb904b16ff742; passport_csrf_token_default=3ab34460fa656183fccfb904b16ff742; d_ticket=9f562383ac0547d0b561904513229d76c9c21; n_mh=hvnJEQ4Q5eiH74-84kTFUyv4VK8xtSrpRZG1AhCeFNI; store-region=cn-fj; store-region-src=uid; LOGIN_STATUS=1; __security_server_data_status=1; FORCE_LOGIN=%7B%22videoConsumedRemainSeconds%22%3A180%7D; pwa2=%223%7C0%7C3%7C0%22; download_guide=%223%2F20230729%2F0%22; volume_info=%7B%22isUserMute%22%3Afalse%2C%22isMute%22%3Afalse%2C%22volume%22%3A0.6%7D; strategyABtestKey=%221690824679.923%22; stream_recommend_feed_params=%22%7B%5C%22cookie_enabled%5C%22%3Atrue%2C%5C%22screen_width%5C%22%3A1536%2C%5C%22screen_height%5C%22%3A864%2C%5C%22browser_online%5C%22%3Atrue%2C%5C%22cpu_core_num%5C%22%3A8%2C%5C%22device_memory%5C%22%3A8%2C%5C%22downlink%5C%22%3A10%2C%5C%22effective_type%5C%22%3A%5C%224g%5C%22%2C%5C%22round_trip_time%5C%22%3A150%7D%22; VIDEO_FILTER_MEMO_SELECT=%7B%22expireTime%22%3A1691443863751%2C%22type%22%3Anull%7D; home_can_add_dy_2_desktop=%221%22; __live_version__=%221.1.1.2169%22; device_web_cpu_core=8; device_web_memory_size=8; xgplayer_user_id=346045893336; csrf_session_id=2e00356b5cd8544d17a0e66484946f28; odin_tt=724eb4dd23bc6ffaed9a1571ac4c757ef597768a70c75fef695b95845b7ffcd8b1524278c2ac31c2587996d058e03414595f0a4e856c53bd0d5e5f56dc6d82e24004dc77773e6b83ced6f80f1bb70627; __ac_nonce=064caded4009deafd8b89; __ac_signature=_02B4Z6wo00f01HLUuwwAAIDBh6tRkVLvBQBy9L-AAHiHf7; ttcid=2e9619ebbb8449eaa3d5a42d8ce88ec835; webcast_leading_last_show_time=1691016922379; webcast_leading_total_show_times=1; webcast_local_quality=sd; live_can_add_dy_2_desktop=%221%22; msToken=1JDHnVPw_9yTvzIrwb7cQj8dCMNOoesXbA_IooV8cezcOdpe4pzusZE7NB7tZn9TBXPr0ylxmv-KMs5rqbNUBHP4P7VBFUu0ZAht_BEylqrLpzgt3y5ne_38hXDOX8o=; msToken=jV_yeN1IQKUd9PlNtpL7k5vthGKcHo0dEh_QPUQhr8G3cuYv-Jbb4NnIxGDmhVOkZOCSihNpA2kvYtHiTW25XNNX_yrsv5FN8O6zm3qmCIXcEe0LywLn7oBO2gITEeg=; tt_scid=mYfqpfbDjqXrIGJuQ7q-DlQJfUSG51qG.KUdzztuGP83OjuVLXnQHjsz-BRHRJu4e986'
|
'Cookie': 'ttwid=1%7CB1qls3GdnZhUov9o2NxOMxxYS2ff6OSvEWbv0ytbES4%7C1680522049%7C280d802d6d478e3e78d0c807f7c487e7ffec0ae4e5fdd6a0fe74c3c6af149511; my_rd=1; passport_csrf_token=3ab34460fa656183fccfb904b16ff742; passport_csrf_token_default=3ab34460fa656183fccfb904b16ff742; d_ticket=9f562383ac0547d0b561904513229d76c9c21; n_mh=hvnJEQ4Q5eiH74-84kTFUyv4VK8xtSrpRZG1AhCeFNI; store-region=cn-fj; store-region-src=uid; LOGIN_STATUS=1; __security_server_data_status=1; FORCE_LOGIN=%7B%22videoConsumedRemainSeconds%22%3A180%7D; pwa2=%223%7C0%7C3%7C0%22; download_guide=%223%2F20230729%2F0%22; volume_info=%7B%22isUserMute%22%3Afalse%2C%22isMute%22%3Afalse%2C%22volume%22%3A0.6%7D; strategyABtestKey=%221690824679.923%22; stream_recommend_feed_params=%22%7B%5C%22cookie_enabled%5C%22%3Atrue%2C%5C%22screen_width%5C%22%3A1536%2C%5C%22screen_height%5C%22%3A864%2C%5C%22browser_online%5C%22%3Atrue%2C%5C%22cpu_core_num%5C%22%3A8%2C%5C%22device_memory%5C%22%3A8%2C%5C%22downlink%5C%22%3A10%2C%5C%22effective_type%5C%22%3A%5C%224g%5C%22%2C%5C%22round_trip_time%5C%22%3A150%7D%22; VIDEO_FILTER_MEMO_SELECT=%7B%22expireTime%22%3A1691443863751%2C%22type%22%3Anull%7D; home_can_add_dy_2_desktop=%221%22; __live_version__=%221.1.1.2169%22; device_web_cpu_core=8; device_web_memory_size=8; xgplayer_user_id=346045893336; csrf_session_id=2e00356b5cd8544d17a0e66484946f28; odin_tt=724eb4dd23bc6ffaed9a1571ac4c757ef597768a70c75fef695b95845b7ffcd8b1524278c2ac31c2587996d058e03414595f0a4e856c53bd0d5e5f56dc6d82e24004dc77773e6b83ced6f80f1bb70627; __ac_nonce=064caded4009deafd8b89; __ac_signature=_02B4Z6wo00f01HLUuwwAAIDBh6tRkVLvBQBy9L-AAHiHf7; ttcid=2e9619ebbb8449eaa3d5a42d8ce88ec835; webcast_leading_last_show_time=1691016922379; webcast_leading_total_show_times=1; webcast_local_quality=sd; live_can_add_dy_2_desktop=%221%22; msToken=1JDHnVPw_9yTvzIrwb7cQj8dCMNOoesXbA_IooV8cezcOdpe4pzusZE7NB7tZn9TBXPr0ylxmv-KMs5rqbNUBHP4P7VBFUu0ZAht_BEylqrLpzgt3y5ne_38hXDOX8o=; msToken=jV_yeN1IQKUd9PlNtpL7k5vthGKcHo0dEh_QPUQhr8G3cuYv-Jbb4NnIxGDmhVOkZOCSihNpA2kvYtHiTW25XNNX_yrsv5FN8O6zm3qmCIXcEe0LywLn7oBO2gITEeg=; tt_scid=mYfqpfbDjqXrIGJuQ7q-DlQJfUSG51qG.KUdzztuGP83OjuVLXnQHjsz-BRHRJu4e986'
|
||||||
@ -75,7 +155,7 @@ async def get_douyin_app_stream_data(url: str, proxy_addr: OptionalStr = None, c
|
|||||||
|
|
||||||
async def get_app_data(room_id: str, sec_uid: str) -> dict:
|
async def get_app_data(room_id: str, sec_uid: str) -> dict:
|
||||||
app_params = {
|
app_params = {
|
||||||
"verifyFp": "verify_lxj5zv70_7szNlAB7_pxNY_48Vh_ALKF_GA1Uf3yteoOY",
|
"verifyFp": "verify_hwj52020_7szNlAB7_pxNY_48Vh_ALKF_GA1Uf3yteoOY",
|
||||||
"type_id": "0",
|
"type_id": "0",
|
||||||
"live_id": "1",
|
"live_id": "1",
|
||||||
"room_id": room_id,
|
"room_id": room_id,
|
||||||
@ -84,34 +164,25 @@ async def get_douyin_app_stream_data(url: str, proxy_addr: OptionalStr = None, c
|
|||||||
"app_id": "1128"
|
"app_id": "1128"
|
||||||
}
|
}
|
||||||
api2 = f'https://webcast.amemv.com/webcast/room/reflow/info/?{urllib.parse.urlencode(app_params)}'
|
api2 = f'https://webcast.amemv.com/webcast/room/reflow/info/?{urllib.parse.urlencode(app_params)}'
|
||||||
json_str2 = await async_req(url=api2, proxy_addr=proxy_addr, headers=headers)
|
a_bogus = ab_sign(urllib.parse.urlparse(api2).query, headers['User-Agent'])
|
||||||
json_data2 = json.loads(json_str2)['data']
|
api2 += "&a_bogus=" + a_bogus
|
||||||
room_data2 = json_data2['room']
|
try:
|
||||||
room_data2['anchor_name'] = room_data2['owner']['nickname']
|
json_str2 = await async_req(url=api2, proxy_addr=proxy_addr, headers=headers)
|
||||||
return room_data2
|
if not json_str2:
|
||||||
|
raise Exception("it triggered risk control")
|
||||||
|
json_data2 = json.loads(json_str2)['data']
|
||||||
|
if not json_data2.get('room'):
|
||||||
|
raise Exception(f"{url} VR live is not supported")
|
||||||
|
room_data2 = json_data2['room']
|
||||||
|
room_data2['anchor_name'] = room_data2['owner']['nickname']
|
||||||
|
return room_data2
|
||||||
|
except Exception as e:
|
||||||
|
raise Exception(f"Douyin app data fetch error, because {e}.")
|
||||||
|
|
||||||
try:
|
try:
|
||||||
web_rid = url.split('?')[0].split('live.douyin.com/')
|
web_rid = url.split('?')[0].split('live.douyin.com/')
|
||||||
if len(web_rid) > 1:
|
if len(web_rid) > 1:
|
||||||
web_rid = web_rid[1]
|
return await get_douyin_web_stream_data(url, proxy_addr, cookies)
|
||||||
params = {
|
|
||||||
"aid": "6383",
|
|
||||||
"app_name": "douyin_web",
|
|
||||||
"live_id": "1",
|
|
||||||
"device_platform": "web",
|
|
||||||
"language": "zh-CN",
|
|
||||||
"browser_language": "zh-CN",
|
|
||||||
"browser_platform": "Win32",
|
|
||||||
"browser_name": "Chrome",
|
|
||||||
"browser_version": "116.0.0.0",
|
|
||||||
"web_rid": web_rid
|
|
||||||
|
|
||||||
}
|
|
||||||
api = f'https://live.douyin.com/webcast/room/web/enter/?{urllib.parse.urlencode(params)}'
|
|
||||||
json_str = await async_req(url=api, proxy_addr=proxy_addr, headers=headers)
|
|
||||||
json_data = json.loads(json_str)['data']
|
|
||||||
room_data = json_data['data'][0]
|
|
||||||
room_data['anchor_name'] = json_data['user']['nickname']
|
|
||||||
else:
|
else:
|
||||||
try:
|
try:
|
||||||
data = await get_sec_user_id(url, proxy_addr=proxy_addr)
|
data = await get_sec_user_id(url, proxy_addr=proxy_addr)
|
||||||
@ -137,9 +208,14 @@ async def get_douyin_app_stream_data(url: str, proxy_addr: OptionalStr = None, c
|
|||||||
json_str = live_core_sdk_data['pull_data']['stream_data']
|
json_str = live_core_sdk_data['pull_data']['stream_data']
|
||||||
json_data = json.loads(json_str)
|
json_data = json.loads(json_str)
|
||||||
if 'origin' in json_data['data']:
|
if 'origin' in json_data['data']:
|
||||||
|
stream_data = live_core_sdk_data['pull_data']['stream_data']
|
||||||
|
origin_data = json.loads(stream_data)['data']['origin']['main']
|
||||||
|
sdk_params = json.loads(origin_data['sdk_params'])
|
||||||
|
origin_hls_codec = sdk_params.get('VCodec') or ''
|
||||||
|
|
||||||
origin_url_list = json_data['data']['origin']['main']
|
origin_url_list = json_data['data']['origin']['main']
|
||||||
origin_m3u8 = {'ORIGIN': origin_url_list["hls"]}
|
origin_m3u8 = {'ORIGIN': origin_url_list["hls"] + '&codec=' + origin_hls_codec}
|
||||||
origin_flv = {'ORIGIN': origin_url_list["flv"]}
|
origin_flv = {'ORIGIN': origin_url_list["flv"] + '&codec=' + origin_hls_codec}
|
||||||
hls_pull_url_map = room_data['stream_url']['hls_pull_url_map']
|
hls_pull_url_map = room_data['stream_url']['hls_pull_url_map']
|
||||||
flv_pull_url = room_data['stream_url']['flv_pull_url']
|
flv_pull_url = room_data['stream_url']['flv_pull_url']
|
||||||
room_data['stream_url']['hls_pull_url_map'] = {**origin_m3u8, **hls_pull_url_map}
|
room_data['stream_url']['hls_pull_url_map'] = {**origin_m3u8, **hls_pull_url_map}
|
||||||
@ -192,8 +268,9 @@ async def get_douyin_stream_data(url: str, proxy_addr: OptionalStr = None, cooki
|
|||||||
origin_url_list = json.loads(match_json_str3.group(1) + '}')
|
origin_url_list = json.loads(match_json_str3.group(1) + '}')
|
||||||
|
|
||||||
if origin_url_list:
|
if origin_url_list:
|
||||||
origin_m3u8 = {'ORIGIN': origin_url_list["hls"]}
|
origin_hls_codec = origin_url_list['sdk_params'].get('VCodec') or ''
|
||||||
origin_flv = {'ORIGIN': origin_url_list["flv"]}
|
origin_m3u8 = {'ORIGIN': origin_url_list["hls"] + '&codec=' + origin_hls_codec}
|
||||||
|
origin_flv = {'ORIGIN': origin_url_list["flv"] + '&codec=' + origin_hls_codec}
|
||||||
hls_pull_url_map = json_data['stream_url']['hls_pull_url_map']
|
hls_pull_url_map = json_data['stream_url']['hls_pull_url_map']
|
||||||
flv_pull_url = json_data['stream_url']['flv_pull_url']
|
flv_pull_url = json_data['stream_url']['flv_pull_url']
|
||||||
json_data['stream_url']['hls_pull_url_map'] = {**origin_m3u8, **hls_pull_url_map}
|
json_data['stream_url']['hls_pull_url_map'] = {**origin_m3u8, **hls_pull_url_map}
|
||||||
@ -208,18 +285,20 @@ async def get_douyin_stream_data(url: str, proxy_addr: OptionalStr = None, cooki
|
|||||||
@trace_error_decorator
|
@trace_error_decorator
|
||||||
async def get_tiktok_stream_data(url: str, proxy_addr: OptionalStr = None, cookies: OptionalStr = None) -> dict | None:
|
async def get_tiktok_stream_data(url: str, proxy_addr: OptionalStr = None, cookies: OptionalStr = None) -> dict | None:
|
||||||
headers = {
|
headers = {
|
||||||
'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:124.0) Gecko/20100101 Firefox/124.0',
|
'referer': 'https://www.tiktok.com/',
|
||||||
'Cookie': 'ttwid=1%7CM-rF193sJugKuNz2RGNt-rh6pAAR9IMceUSzlDnPCNI%7C1683274418%7Cf726d4947f2fc37fecc7aeb0cdaee52892244d04efde6f8a8edd2bb168263269; tiktok_webapp_theme=light; tt_chain_token=VWkygAWDlm1cFg/k8whmOg==; passport_csrf_token=6e422c5a7991f8cec7033a8082921510; passport_csrf_token_default=6e422c5a7991f8cec7033a8082921510; d_ticket=f8c267d4af4523c97be1ccb355e9991e2ae06; odin_tt=320b5f386cdc23f347be018e588873db7f7aea4ea5d1813681c3fbc018ea025dde957b94f74146dbc0e3612426b865ccb95ec8abe4ee36cca65f15dbffec0deff7b0e69e8ea536d46e0f82a4fc37d211; cmpl_token=AgQQAPNSF-RO0rT04baWtZ0T_jUjl4fVP4PZYM2QPw; uid_tt=319b558dbba684bb1557206c92089cd113a875526a89aee30595925d804b81c7; uid_tt_ss=319b558dbba684bb1557206c92089cd113a875526a89aee30595925d804b81c7; sid_tt=ad5e736f4bedb2f6d42ccd849e706b1d; sessionid=ad5e736f4bedb2f6d42ccd849e706b1d; sessionid_ss=ad5e736f4bedb2f6d42ccd849e706b1d; store-idc=useast5; store-country-code=us; store-country-code-src=uid; tt-target-idc=useast5; tt-target-idc-sign=qXNk0bb1pDQ0FbCNF120Pl9WWMLZg9Edv5PkfyCbS4lIk5ieW5tfLP7XWROnN0mEaSlc5hg6Oji1pF-yz_3ZXnUiNMrA9wNMPvI6D9IFKKVmq555aQzwPIGHv0aQC5dNRgKo5Z5LBkgxUMWEojTKclq2_L8lBciw0IGdhFm_XyVJtbqbBKKgybGDLzK8ZyxF4Jl_cYRXaDlshZjc38JdS6wruDueRSHe7YvNbjxCnApEFUv-OwJANSPU_4rvcqpVhq3JI2VCCfw-cs_4MFIPCDOKisk5EhAo2JlHh3VF7_CLuv80FXg_7ZqQ2pJeMOog294rqxwbbQhl3ATvjQV_JsWyUsMd9zwqecpylrPvtySI2u1qfoggx1owLrrUynee1R48QlanLQnTNW_z1WpmZBgVJqgEGLwFoVOmRzJuFFNj8vIqdjM2nDSdWqX8_wX3wplohkzkPSFPfZgjzGnQX28krhgTytLt7BXYty5dpfGtsdb11WOFHM6MZ9R9uLVB; sid_guard=ad5e736f4bedb2f6d42ccd849e706b1d%7C1690990657%7C15525213%7CMon%2C+29-Jan-2024+08%3A11%3A10+GMT; sid_ucp_v1=1.0.0-KGM3YzgwYjZhODgyYWI1NjIwNTA0NjBmOWUxMGRhMjIzYTI2YjMxNDUKGAiqiJ30keKD5WQQwfCppgYYsws4AkDsBxAEGgd1c2Vhc3Q1IiBhZDVlNzM2ZjRiZWRiMmY2ZDQyY2NkODQ5ZTcwNmIxZA; ssid_ucp_v1=1.0.0-KGM3YzgwYjZhODgyYWI1NjIwNTA0NjBmOWUxMGRhMjIzYTI2YjMxNDUKGAiqiJ30keKD5WQQwfCppgYYsws4AkDsBxAEGgd1c2Vhc3Q1IiBhZDVlNzM2ZjRiZWRiMmY2ZDQyY2NkODQ5ZTcwNmIxZA; tt_csrf_token=dD0EIH8q-pe3qDQsCyyD1jLN6KizJDRjOEyk; __tea_cache_tokens_1988={%22_type_%22:%22default%22%2C%22user_unique_id%22:%227229608516049831425%22%2C%22timestamp%22:1683274422659}; ttwid=1%7CM-rF193sJugKuNz2RGNt-rh6pAAR9IMceUSzlDnPCNI%7C1694002151%7Cd89b77afc809b1a610661a9d1c2784d80ebef9efdd166f06de0d28e27f7e4efe; msToken=KfJAVZ7r9D_QVeQlYAUZzDFbc1Yx-nZz6GF33eOxgd8KlqvTg1lF9bMXW7gFV-qW4MCgUwnBIhbiwU9kdaSpgHJCk-PABsHCtTO5J3qC4oCTsrXQ1_E0XtbqiE4OVLZ_jdF1EYWgKNPT2SnwGkQ=; msToken=KfJAVZ7r9D_QVeQlYAUZzDFbc1Yx-nZz6GF33eOxgd8KlqvTg1lF9bMXW7gFV-qW4MCgUwnBIhbiwU9kdaSpgHJCk-PABsHCtTO5J3qC4oCTsrXQ1_E0XtbqiE4OVLZ_jdF1EYWgKNPT2SnwGkQ='
|
'user-agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) '
|
||||||
|
'Chrome/141.0.0.0 Safari/537.36',
|
||||||
|
'cookie': cookies or '1%7Cz7FKki38aKyy7i-BC9rEDwcrVvjcLcFEL6QIeqldoy4%7C1761302831%7C6c1461e9f1f980cbe0404c5190'
|
||||||
|
'5177d5d53bbd822e1bf66128887d942c9c3e2f'
|
||||||
}
|
}
|
||||||
if cookies:
|
|
||||||
headers['Cookie'] = cookies
|
|
||||||
for i in range(3):
|
for i in range(3):
|
||||||
html_str = await async_req(url=url, proxy_addr=proxy_addr, headers=headers, abroad=True)
|
html_str = await async_req(url=url, proxy_addr=proxy_addr, headers=headers, abroad=True, http2=False)
|
||||||
time.sleep(1)
|
time.sleep(1)
|
||||||
if "We regret to inform you that we have discontinued operating TikTok" in html_str:
|
if "We regret to inform you that we have discontinued operating TikTok" in html_str:
|
||||||
msg = re.search('<p>\n\\s+(We regret to inform you that we have discontinu.*?)\\.\n\\s+</p>', html_str)
|
msg = re.search('<p>\n\\s+(We regret to inform you that we have discontinu.*?)\\.\n\\s+</p>', html_str)
|
||||||
raise ConnectionError(
|
raise ConnectionError(
|
||||||
f"Your proxy node's regional network is blocked from accessing TikTok; please switch to a node in "
|
"Your proxy node's regional network is blocked from accessing TikTok; please switch to a node in "
|
||||||
f"another region to access. {msg.group(1) if msg else ''}"
|
f"another region to access. {msg.group(1) if msg else ''}"
|
||||||
)
|
)
|
||||||
if 'UNEXPECTED_EOF_WHILE_READING' not in html_str:
|
if 'UNEXPECTED_EOF_WHILE_READING' not in html_str:
|
||||||
@ -590,7 +669,7 @@ async def get_bilibili_room_info_h5(url: str, proxy_addr: OptionalStr = None, co
|
|||||||
api = f'https://api.live.bilibili.com/xlive/web-room/v1/index/getH5InfoByRoom?room_id={room_id}'
|
api = f'https://api.live.bilibili.com/xlive/web-room/v1/index/getH5InfoByRoom?room_id={room_id}'
|
||||||
json_str = await async_req(api, proxy_addr=proxy_addr, headers=headers)
|
json_str = await async_req(api, proxy_addr=proxy_addr, headers=headers)
|
||||||
room_info = json.loads(json_str)
|
room_info = json.loads(json_str)
|
||||||
title = room_info['data']['room_info']['title']
|
title = room_info['data']['room_info'].get('title') if room_info.get('data') else ''
|
||||||
return title
|
return title
|
||||||
|
|
||||||
|
|
||||||
@ -762,7 +841,7 @@ async def get_bigo_stream_url(url: str, proxy_addr: OptionalStr = None, cookies:
|
|||||||
if '&h=' in url:
|
if '&h=' in url:
|
||||||
room_id = url.split('&h=')[-1]
|
room_id = url.split('&h=')[-1]
|
||||||
else:
|
else:
|
||||||
room_id = re.search('www.bigo.tv/cn/(\\w+)', url).group(1)
|
room_id = url.split("?")[0].rsplit("/", maxsplit=1)[-1]
|
||||||
|
|
||||||
data = {'siteId': room_id} # roomId
|
data = {'siteId': room_id} # roomId
|
||||||
url2 = 'https://ta.bigo.tv/official_website/studio/getInternalStudioInfo'
|
url2 = 'https://ta.bigo.tv/official_website/studio/getInternalStudioInfo'
|
||||||
@ -779,8 +858,16 @@ async def get_bigo_stream_url(url: str, proxy_addr: OptionalStr = None, cookies:
|
|||||||
result['record_url'] = m3u8_url
|
result['record_url'] = m3u8_url
|
||||||
result |= {"title": live_title, "is_live": True, "m3u8_url": m3u8_url, 'record_url': m3u8_url}
|
result |= {"title": live_title, "is_live": True, "m3u8_url": m3u8_url, 'record_url': m3u8_url}
|
||||||
elif result['anchor_name'] == '':
|
elif result['anchor_name'] == '':
|
||||||
html_str = await async_req(url=f'https://www.bigo.tv/cn/{room_id}', proxy_addr=proxy_addr, headers=headers)
|
html_str = await async_req(url=f'https://www.bigo.tv/{url.split("/")[3]}/{room_id}',
|
||||||
result['anchor_name'] = re.search('<title>欢迎来到(.*?)的直播间</title>', html_str, re.DOTALL).group(1)
|
proxy_addr=proxy_addr, headers=headers)
|
||||||
|
match_anchor_name = re.search('<title>欢迎来到(.*?)的直播间</title>', html_str, re.DOTALL)
|
||||||
|
if match_anchor_name:
|
||||||
|
anchor_name = match_anchor_name.group(1)
|
||||||
|
else:
|
||||||
|
match_anchor_name = re.search('<meta data-n-head="ssr" data-hid="og:title" property="og:title" '
|
||||||
|
'content="(.*?) - BIGO LIVE">', html_str, re.DOTALL)
|
||||||
|
anchor_name = match_anchor_name.group(1)
|
||||||
|
result['anchor_name'] = anchor_name
|
||||||
|
|
||||||
return result
|
return result
|
||||||
|
|
||||||
@ -917,6 +1004,76 @@ async def get_sooplive_tk(url: str, rtype: str, proxy_addr: OptionalStr = None,
|
|||||||
return f"{bj_name}-{bj_id}", json_data['CHANNEL']['BNO']
|
return f"{bj_name}-{bj_id}", json_data['CHANNEL']['BNO']
|
||||||
|
|
||||||
|
|
||||||
|
def get_soop_headers(cookies):
|
||||||
|
headers = {
|
||||||
|
'client-id': str(uuid.uuid4()),
|
||||||
|
'user-agent': 'Mozilla/5.0 (iPhone; CPU iPhone OS 18_5 like Mac OS X) AppleWebKit/605.1.15 (KHTML, '
|
||||||
|
'like Gecko) Version/18.5 Mobile/15E148 Safari/604.1 Edg/141.0.0.0',
|
||||||
|
}
|
||||||
|
if cookies:
|
||||||
|
headers['cookie'] = cookies
|
||||||
|
return headers
|
||||||
|
|
||||||
|
|
||||||
|
async def _get_soop_channel_info_global(bj_id, proxy_addr: OptionalStr = None, cookies: OptionalStr = None) -> str:
|
||||||
|
headers = get_soop_headers(cookies)
|
||||||
|
api = 'https://api.sooplive.com/v2/channel/info/' + str(bj_id)
|
||||||
|
json_str = await async_req(api, proxy_addr=proxy_addr, headers=headers)
|
||||||
|
json_data = json.loads(json_str)
|
||||||
|
nickname = json_data['data']['streamerChannelInfo']['nickname']
|
||||||
|
channelId = json_data['data']['streamerChannelInfo']['channelId']
|
||||||
|
anchor_name = f"{nickname}-{channelId}"
|
||||||
|
return anchor_name
|
||||||
|
|
||||||
|
|
||||||
|
async def _get_soop_stream_info_global(bj_id, proxy_addr: OptionalStr = None, cookies: OptionalStr = None) -> tuple:
|
||||||
|
headers = get_soop_headers(cookies)
|
||||||
|
api = 'https://api.sooplive.com/v2/stream/info/' + str(bj_id)
|
||||||
|
json_str = await async_req(api, proxy_addr=proxy_addr, headers=headers)
|
||||||
|
json_data = json.loads(json_str)
|
||||||
|
status = json_data['data']['isStream']
|
||||||
|
title = json_data['data']['title']
|
||||||
|
return status, title
|
||||||
|
|
||||||
|
|
||||||
|
async def _fetch_web_stream_data_global(url: str, proxy_addr: OptionalStr = None, cookies: OptionalStr = None) -> dict:
|
||||||
|
split_url = url.split('/')
|
||||||
|
bj_id = split_url[3] if len(split_url) < 6 else split_url[5]
|
||||||
|
anchor_name = await _get_soop_channel_info_global(bj_id)
|
||||||
|
result = {"anchor_name": anchor_name or '', "is_live": False, "live_url": url}
|
||||||
|
status, title = await _get_soop_stream_info_global(bj_id)
|
||||||
|
if not status:
|
||||||
|
return result
|
||||||
|
else:
|
||||||
|
async def _get_url_list(m3u8: str) -> list[str]:
|
||||||
|
headers = {
|
||||||
|
'user-agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) '
|
||||||
|
'Chrome/141.0.0.0 Safari/537.36 Edg/141.0.0.0',
|
||||||
|
}
|
||||||
|
if cookies:
|
||||||
|
headers['cookie'] = cookies
|
||||||
|
resp = await async_req(url=m3u8, proxy_addr=proxy_addr, headers=headers)
|
||||||
|
play_url_list = []
|
||||||
|
url_prefix = '/'.join(m3u8.split('/')[0:3])
|
||||||
|
for i in resp.split('\n'):
|
||||||
|
if not i.startswith('#') and i.strip():
|
||||||
|
play_url_list.append(url_prefix + i.strip())
|
||||||
|
bandwidth_pattern = re.compile(r'BANDWIDTH=(\d+)')
|
||||||
|
bandwidth_list = bandwidth_pattern.findall(resp)
|
||||||
|
url_to_bandwidth = {purl: int(bandwidth) for bandwidth, purl in zip(bandwidth_list, play_url_list)}
|
||||||
|
play_url_list = sorted(play_url_list, key=lambda purl: url_to_bandwidth[purl], reverse=True)
|
||||||
|
return play_url_list
|
||||||
|
|
||||||
|
m3u8_url = 'https://global-media.sooplive.com/live/' + str(bj_id) + '/master.m3u8'
|
||||||
|
result |= {
|
||||||
|
'is_live': True,
|
||||||
|
'title': title,
|
||||||
|
'm3u8_url': m3u8_url,
|
||||||
|
'play_url_list': await _get_url_list(m3u8_url)
|
||||||
|
}
|
||||||
|
return result
|
||||||
|
|
||||||
|
|
||||||
@trace_error_decorator
|
@trace_error_decorator
|
||||||
async def get_sooplive_stream_data(
|
async def get_sooplive_stream_data(
|
||||||
url: str, proxy_addr: OptionalStr = None, cookies: OptionalStr = None,
|
url: str, proxy_addr: OptionalStr = None, cookies: OptionalStr = None,
|
||||||
@ -931,6 +1088,9 @@ async def get_sooplive_stream_data(
|
|||||||
if cookies:
|
if cookies:
|
||||||
headers['Cookie'] = cookies
|
headers['Cookie'] = cookies
|
||||||
|
|
||||||
|
if "sooplive.com" in url:
|
||||||
|
return await _fetch_web_stream_data_global(url, proxy_addr, cookies)
|
||||||
|
|
||||||
split_url = url.split('/')
|
split_url = url.split('/')
|
||||||
bj_id = split_url[3] if len(split_url) < 6 else split_url[5]
|
bj_id = split_url[3] if len(split_url) < 6 else split_url[5]
|
||||||
|
|
||||||
@ -1011,8 +1171,8 @@ async def get_sooplive_stream_data(
|
|||||||
else:
|
else:
|
||||||
raise RuntimeError("sooplive login failed, please check if the account and password are correct")
|
raise RuntimeError("sooplive login failed, please check if the account and password are correct")
|
||||||
elif json_data['data']['code'] == -6001:
|
elif json_data['data']['code'] == -6001:
|
||||||
print(f"error message:Please check if the input sooplive live room address "
|
print("error message:Please check if the input sooplive live room address "
|
||||||
f"is correct.")
|
"is correct.")
|
||||||
return result
|
return result
|
||||||
if json_data['result'] == 1 and anchor_name:
|
if json_data['result'] == 1 and anchor_name:
|
||||||
broad_no = json_data['data']['broad_no']
|
broad_no = json_data['data']['broad_no']
|
||||||
@@ -1248,7 +1408,7 @@ async def login_flextv(username: str, password: str, proxy_addr: OptionalStr = N
         'accept': 'application/json, text/plain, */*',
         'accept-language': 'zh-CN,zh;q=0.9,en;q=0.8,en-GB;q=0.7,en-US;q=0.6',
         'content-type': 'application/json;charset=UTF-8',
-        'referer': 'https://www.flextv.co.kr/',
+        'referer': 'https://www.ttinglive.com/',
         'user-agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:124.0) Gecko/20100101 Firefox/124.0',
     }
 
@@ -1260,7 +1420,7 @@ async def login_flextv(username: str, password: str, proxy_addr: OptionalStr = N
         'device': 'PCWEB',
     }
 
-    url = 'https://api.flextv.co.kr/v2/api/auth/signin'
+    url = 'https://www.ttinglive.com/v2/api/auth/signin'
 
     try:
         print("Logging into FlexTV platform...")
@@ -1288,13 +1448,13 @@ async def get_flextv_stream_url(
     headers = {
         'accept': 'application/json, text/plain, */*',
         'accept-language': 'zh-CN,zh;q=0.9,en;q=0.8,en-GB;q=0.7,en-US;q=0.6',
-        'referer': 'https://www.flextv.co.kr/',
+        'referer': 'https://www.ttinglive.com/',
         'user-agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:124.0) Gecko/20100101 Firefox/124.0',
     }
     user_id = url.split('/live')[0].rsplit('/', maxsplit=1)[-1]
     if cookie:
         headers['Cookie'] = cookie
-    play_api = f'https://api.flextv.co.kr/api/channels/{user_id}/stream?option=all'
+    play_api = f'https://www.ttinglive.com/api/channels/{user_id}/stream?option=all'
     json_str = await async_req(play_api, proxy_addr=proxy_addr, headers=headers, abroad=True)
     if 'HTTP Error 400: Bad Request' in json_str:
         raise ConnectionError(
@@ -1316,7 +1476,7 @@ async def get_flextv_stream_data(
     headers = {
         'accept': 'application/json, text/plain, */*',
         'accept-language': 'zh-CN,zh;q=0.9,en;q=0.8,en-GB;q=0.7,en-US;q=0.6',
-        'referer': 'https://www.flextv.co.kr/',
+        'referer': 'https://www.ttinglive.com/',
         'user-agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:124.0) Gecko/20100101 Firefox/124.0',
     }
     if cookies:
@@ -1325,7 +1485,7 @@ async def get_flextv_stream_data(
     result = {"anchor_name": '', "is_live": False}
     new_cookies = None
     try:
-        url2 = f'https://www.flextv.co.kr/channels/{user_id}/live'
+        url2 = f'https://www.ttinglive.com/channels/{user_id}/live'
         html_str = await async_req(url2, proxy_addr=proxy_addr, headers=headers, abroad=True)
         json_str = re.search('<script id="__NEXT_DATA__" type=".*">(.*?)</script>', html_str).group(1)
         json_data = json.loads(json_str)
@@ -1357,13 +1517,17 @@ async def get_flextv_stream_data(
             result["anchor_name"] = anchor_name
             play_url = await get_flextv_stream_url(url=url, proxy_addr=proxy_addr, cookies=cookies)
             if play_url:
-                play_url_list = await get_play_url_list(m3u8=play_url, proxy=proxy_addr, header=headers, abroad=True)
-                if play_url_list:
-                    result['m3u8_url'] = play_url
-                    result['play_url_list'] = play_url_list
-                    result['is_live'] = True
+                result['is_live'] = True
+                if '.m3u8' in play_url:
+                    play_url_list = await get_play_url_list(m3u8=play_url, proxy=proxy_addr, header=headers, abroad=True)
+                    if play_url_list:
+                        result['m3u8_url'] = play_url
+                        result['play_url_list'] = play_url_list
+                else:
+                    result['flv_url'] = play_url
+                    result['record_url'] = play_url
         else:
-            url2 = f'https://www.flextv.co.kr/channels/{user_id}'
+            url2 = f'https://www.ttinglive.com/channels/{user_id}'
             html_str = await async_req(url2, proxy_addr=proxy_addr, headers=headers, abroad=True)
             anchor_name = re.search('<meta name="twitter:title" content="(.*?)의', html_str).group(1)
             result["anchor_name"] = anchor_name
@@ -1701,7 +1865,7 @@ async def login_twitcasting(
     }
     try:
         cookie_dict = await async_req(login_api, proxy_addr=proxy_addr, headers=headers,
-                                      json_data=data, return_cookies=True, timeout=20)
+                                      data=data, return_cookies=True, timeout=20)
         if 'tc_ss' in cookie_dict:
             cookie = utils.dict_to_cookie_str(cookie_dict)
             return cookie
@@ -1750,7 +1914,7 @@ async def get_twitcasting_stream_url(
                                "configuration file is correct")
         print("TwitCasting login successful! Starting to fetch data...")
         headers['Cookie'] = new_cookie
-        anchor_name, live_status, live_title = get_data(headers)
+        anchor_name, live_status, live_title = await get_data(headers)
     except AttributeError:
         print("Failed to retrieve TwitCasting data, attempting to log in...")
         new_cookie = await login_twitcasting(
@@ -1764,8 +1928,17 @@ async def get_twitcasting_stream_url(
 
     result["anchor_name"] = anchor_name
     if live_status == 'true':
-        play_url = f'https://twitcasting.tv/{anchor_id}/metastream.m3u8/?video=1&mode=source'
-        result |= {'title': live_title, 'is_live': True, "m3u8_url": play_url, "record_url": play_url}
+        url_streamserver = f"https://twitcasting.tv/streamserver.php?target={anchor_id}&mode=client&player=pc_web"
+        stream_data = await async_req(url_streamserver, proxy_addr=proxy_addr, headers=headers)
+        json_data = json.loads(stream_data)
+        if not json_data.get('tc-hls') or not json_data['tc-hls'].get("streams"):
+            raise RuntimeError("No m3u8_url,please check the url")
+
+        stream_dict = json_data['tc-hls']["streams"]
+        quality_order = {"high": 0, "medium": 1, "low": 2}
+        sorted_streams = sorted(stream_dict.items(), key=lambda item: quality_order[item[0]])
+        play_url_list = [url for quality, url in sorted_streams]
+        result |= {'title': live_title, 'is_live': True, "play_url_list": play_url_list}
     result['new_cookies'] = new_cookie
     return result
 
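Note: the streamserver.php response handled above exposes the HLS variants under tc-hls.streams keyed by quality name. A short sketch of just the ordering step, fed with an invented response body; the key names follow the diff, the URLs are placeholders.

    import json

    # Invented example of the JSON shape the diff reads from streamserver.php.
    stream_data = json.dumps({
        "tc-hls": {
            "streams": {
                "low": "https://example.com/stream_low.m3u8",
                "high": "https://example.com/stream_high.m3u8",
                "medium": "https://example.com/stream_medium.m3u8",
            }
        }
    })

    json_data = json.loads(stream_data)
    stream_dict = json_data['tc-hls']['streams']
    quality_order = {"high": 0, "medium": 1, "low": 2}
    # Sort the (quality, url) pairs so the best quality comes first.
    sorted_streams = sorted(stream_dict.items(), key=lambda item: quality_order[item[0]])
    play_url_list = [url for quality, url in sorted_streams]
    print(play_url_list)  # high, medium, low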
@@ -1836,6 +2009,7 @@ async def get_weibo_stream_data(url: str, proxy_addr: OptionalStr = None, cookie
         'Accept-Language': 'zh-CN,zh;q=0.9,en;q=0.8,en-GB;q=0.7,en-US;q=0.6',
         'Cookie': 'XSRF-TOKEN=qAP-pIY5V4tO6blNOhA4IIOD; SUB=_2AkMRNMCwf8NxqwFRmfwWymPrbI9-zgzEieKnaDFrJRMxHRl-yT9kqmkhtRB6OrTuX5z9N_7qk9C3xxEmNR-8WLcyo2PM; SUBP=0033WrSXqPxfM72-Ws9jqgMF55529P9D9WWemwcqkukCduUO11o9sBqA; WBPSESS=Wk6CxkYDejV3DDBcnx2LOXN9V1LjdSTNQPMbBDWe4lO2HbPmXG_coMffJ30T-Avn_ccQWtEYFcq9fab1p5RR6PEI6w661JcW7-56BszujMlaiAhLX-9vT4Zjboy1yf2l',
         'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:124.0) Gecko/20100101 Firefox/124.0',
+        'Referer': 'https://weibo.com/u/5885340893'
     }
     if cookies:
         headers['Cookie'] = cookies
@@ -1970,6 +2144,7 @@ async def get_twitchtv_stream_data(url: str, proxy_addr: OptionalStr = None, coo
         'Accept-Language': 'en-US',
         'Referer': 'https://www.twitch.tv/',
         'Client-ID': 'kimne78kx3ncx6brgo4mv6wki5h1ko',
+        'device-id': generate_random_string(16).lower(),
     }
 
     if cookies:
@@ -2040,6 +2215,12 @@ async def get_liveme_stream_url(url: str, proxy_addr: OptionalStr = None, cookie
     if cookies:
         headers['Cookie'] = cookies
 
+    if 'index.html' not in url:
+        html_str = await async_req(url, proxy_addr=proxy_addr, headers=headers, abroad=True)
+        match_url = re.search('<meta property="og:url" content="(.*?)">', html_str)
+        if match_url:
+            url = match_url.group(1)
+
     room_id = url.split("/index.html")[0].rsplit('/', maxsplit=1)[-1]
     sign_data = execjs.compile(open(f'{JS_SCRIPT_PATH}/liveme.js').read()).call('sign', room_id,
                                                                                 f'{JS_SCRIPT_PATH}/crypto-js.min.js')
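Note: the og:url lookup added for liveme share links can be exercised on its own; a hedged sketch with a fabricated HTML snippet standing in for the page that the real code fetches with async_req.

    import re

    # Fabricated page fragment; in the project the page is fetched over HTTP.
    html_str = '<head><meta property="og:url" content="https://www.liveme.com/us/v/123456789/index.html"></head>'

    url = 'https://www.liveme.com/share?videoid=123456789'
    if 'index.html' not in url:
        match_url = re.search('<meta property="og:url" content="(.*?)">', html_str)
        if match_url:
            url = match_url.group(1)

    room_id = url.split("/index.html")[0].rsplit('/', maxsplit=1)[-1]
    print(room_id)  # 123456789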
@@ -2998,7 +3179,7 @@ async def get_faceit_stream_data(url: str, proxy_addr: OptionalStr = None, cooki
 
     if cookies:
         headers['Cookie'] = cookies
-    nickname = re.findall(f'/players/(.*?)/stream', url)[0]
+    nickname = re.findall('/players/(.*?)/stream', url)[0]
     api = f'https://www.faceit.com/api/users/v1/nicknames/{nickname}'
     json_str = await async_req(api, proxy_addr=proxy_addr, headers=headers)
     json_data = json.loads(json_str)
@@ -3015,4 +3196,200 @@ async def get_faceit_stream_data(url: str, proxy_addr: OptionalStr = None, cooki
         result['anchor_name'] = anchor_name
     else:
         result = {'anchor_name': anchor_name, 'is_live': False}
+    return result
+
+
+@trace_error_decorator
+async def get_migu_stream_url(url: str, proxy_addr: OptionalStr = None, cookies: OptionalStr = None) -> dict:
+    headers = {
+        'origin': 'https://www.miguvideo.com',
+        'referer': 'https://www.miguvideo.com/',
+        'user-agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) '
+                      'Chrome/121.0.0.0 Safari/537.36 Edg/121.0.0.0',
+        'appCode': 'miguvideo_default_www',
+        'appId': 'miguvideo',
+        'channel': 'H5',
+    }
+
+    if cookies:
+        headers['Cookie'] = cookies
+
+    web_id = url.split('?')[0].rsplit('/')[-1]
+    api = f'https://vms-sc.miguvideo.com/vms-match/v6/staticcache/basic/basic-data/{web_id}/miguvideo'
+    json_str = await async_req(api, proxy_addr=proxy_addr, headers=headers)
+    json_data = json.loads(json_str)
+
+    anchor_name = json_data['body']['title']
+    live_title = json_data['body'].get('title') + '-' + json_data['body'].get('detailPageTitle', '')
+    room_id = json_data['body'].get('pId')
+
+    result = {"anchor_name": anchor_name, "is_live": False}
+    if not room_id:
+        return result
+
+    params = {
+        'contId': room_id,
+        'rateType': '3',
+        'clientId': str(uuid.uuid4()),
+        'timestamp': int(time.time() * 1000),
+        'flvEnable': 'true',
+        'xh265': 'false',
+        'chip': 'mgwww',
+        'channelId': '',
+    }
+
+    api = f'https://webapi.miguvideo.com/gateway/playurl/v3/play/playurl?{urllib.parse.urlencode(params)}'
+    json_str = await async_req(api, proxy_addr=proxy_addr, headers=headers)
+    json_data = json.loads(json_str)
+    live_status = json_data['body']['content']['currentLive']
+    if live_status != '1':
+        return result
+    else:
+        result['title'] = live_title
+        source_url = json_data['body']['urlInfo']['url']
+
+        async def _get_dd_calcu(url):
+            try:
+                result = subprocess.run(
+                    ["node", f"{JS_SCRIPT_PATH}/migu.js", url],
+                    capture_output=True,
+                    text=True,
+                    check=True
+                )
+                return result.stdout.strip()
+            except execjs.ProgramError:
+                raise execjs.ProgramError('Failed to execute JS code. Please check if the Node.js environment')
+
+        ddCalcu = await _get_dd_calcu(source_url)
+        real_source_url = f'{source_url}&ddCalcu={ddCalcu}&sv=10010'
+        if '.m3u8' in real_source_url:
+            m3u8_url = await async_req(
+                real_source_url, proxy_addr=proxy_addr, headers=headers, redirect_url=True)
+            result['m3u8_url'] = m3u8_url
+            result['record_url'] = m3u8_url
+        else:
+            result['flv_url'] = real_source_url
+            result['record_url'] = real_source_url
+        result['is_live'] = True
+    return result
+
+
+@trace_error_decorator
+async def get_lianjie_stream_url(url: str, proxy_addr: OptionalStr = None, cookies: OptionalStr = None) -> dict:
+    headers = {
+        'user-agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) '
+                      'Chrome/121.0.0.0 Safari/537.36 Edg/121.0.0.0',
+        'accept-language': 'zh-CN,zh;q=0.8,zh-TW;q=0.7,zh-HK;q=0.5,en-US;q=0.3,en;q=0.2',
+    }
+
+    if cookies:
+        headers['cookie'] = cookies
+
+    room_id = url.split('?')[0].rsplit('lailianjie.com/', maxsplit=1)[-1]
+    play_api = f'https://api.lailianjie.com/ApiServices/service/live/getRoomInfo?&_$t=&_sign=&roomNumber={room_id}'
+    json_str = await async_req(play_api, proxy_addr=proxy_addr, headers=headers)
+    json_data = json.loads(json_str)
+
+    room_data = json_data['data']
+    anchor_name = room_data['nickname']
+    live_status = room_data['isonline']
+
+    result = {"anchor_name": anchor_name, "is_live": False}
+    if live_status == 1:
+        title = room_data['defaultRoomTitle']
+        webrtc_url = room_data['videoUrl']
+        https_url = "https://" + webrtc_url.split('webrtc://')[1]
+        flv_url = https_url.replace('?', '.flv?')
+        m3u8_url = https_url.replace('?', '.m3u8?')
+        result |= {'is_live': True, 'title': title, 'm3u8_url': m3u8_url, 'flv_url': flv_url, 'record_url': flv_url}
+    return result
+
+
+@trace_error_decorator
+async def get_laixiu_stream_url(url: str, proxy_addr: OptionalStr = None, cookies: OptionalStr = None) -> dict:
+    def generate_uuid(ua_type: str):
+        if ua_type == "mobile":
+            return str(uuid.uuid4())
+        return str(uuid.uuid4()).replace('-', '')
+
+    def calculate_sign(ua_type: str = 'pc'):
+        a = int(time.time() * 1000)
+        s = generate_uuid(ua_type)
+        u = 'kk792f28d6ff1f34ec702c08626d454b39pro'
+
+        input_str = f"web{s}{a}{u}"
+        md5_hash = hashlib.md5(input_str.encode('utf-8')).hexdigest()
+
+        return {
+            'timestamp': a,
+            'imei': s,
+            'requestId': md5_hash,
+            'inputString': input_str
+        }
+
+    sign_data = calculate_sign(ua_type='pc')
+    headers = {
+        'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) '
+                      'Chrome/138.0.0.0 Safari/537.36 Edg/138.0.0.0',
+        'mobileModel': 'web',
+        'timestamp': str(sign_data['timestamp']),
+        'loginType': '2',
+        'versionCode': '10003',
+        'imei': sign_data['imei'],
+        'requestId': sign_data['requestId'],
+        'channel': '9',
+        'version': '1.0.0',
+        'os': 'web',
+        'platform': 'WEB',
+        'Origin': 'https://www.imkktv.com',
+        'Referer': 'https://www.imkktv.com/',
+        'Accept-Language': 'zh-CN,zh;q=0.9,en;q=0.8,en-GB;q=0.7,en-US;q=0.6',
+    }
+
+    if cookies:
+        headers['cookie'] = cookies
+
+    pattern = r"(?:roomId|anchorId)=(.*?)(?=&|$)"
+    match = re.search(pattern, url)
+    room_id = match.group(1) if match else ''
+    play_api = f'https://api.imkktv.com/liveroom/getShareLiveVideo?roomId={room_id}'
+    json_str = await async_req(play_api, proxy_addr=proxy_addr, headers=headers)
+    json_data = json.loads(json_str)
+
+    room_data = json_data['data']
+    anchor_name = room_data['nickname']
+    live_status = room_data['playStatus'] == 0
+
+    result = {"anchor_name": anchor_name, "is_live": False}
+    if live_status:
+        flv_url = room_data['playUrl']
+        result |= {'is_live': True, 'flv_url': flv_url, 'record_url': flv_url}
+    return result
+
+
+@trace_error_decorator
+async def get_picarto_stream_url(url: str, proxy_addr: OptionalStr = None, cookies: OptionalStr = None) -> dict:
+    headers = {
+        'user-agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) '
+                      'Chrome/121.0.0.0 Safari/537.36 Edg/121.0.0.0',
+        'accept-language': 'zh-CN,zh;q=0.8,zh-TW;q=0.7,zh-HK;q=0.5,en-US;q=0.3,en;q=0.2',
+    }
+
+    if cookies:
+        headers['cookie'] = cookies
+
+    anchor_id = url.split('?')[0].rsplit('/', maxsplit=1)[-1]
+    api = f'https://ptvintern.picarto.tv/api/channel/detail/{anchor_id}'
+
+    json_str = await async_req(api, proxy_addr=proxy_addr, headers=headers)
+    json_data = json.loads(json_str)
+
+    anchor_name = json_data['channel']['name']
+    live_status = json_data['channel']['online']
+
+    result = {"anchor_name": anchor_name, "is_live": live_status}
+    if live_status:
+        title = json_data['channel']['title']
+        m3u8_url = f"https://1-edge1-us-newyork.picarto.tv/stream/hls/golive+{anchor_name}/index.m3u8"
+        result |= {'is_live': True, 'title': title, 'm3u8_url': m3u8_url, 'record_url': m3u8_url}
     return result
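Note: among the new extractors above, get_laixiu_stream_url derives its request headers from an MD5 signature over a UUID-based imei, a millisecond timestamp, and a fixed key. A standalone sketch of that signing step; the key string is copied from the diff, nothing external is called.

    import hashlib
    import time
    import uuid

    def calculate_sign(ua_type: str = 'pc') -> dict:
        timestamp = int(time.time() * 1000)
        # PC clients use a UUID without dashes; mobile keeps the dashes.
        imei = str(uuid.uuid4()) if ua_type == "mobile" else str(uuid.uuid4()).replace('-', '')
        key = 'kk792f28d6ff1f34ec702c08626d454b39pro'
        input_str = f"web{imei}{timestamp}{key}"
        request_id = hashlib.md5(input_str.encode('utf-8')).hexdigest()
        return {'timestamp': timestamp, 'imei': imei, 'requestId': request_id}

    sign = calculate_sign()
    print(sign['timestamp'], sign['imei'], sign['requestId'])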
@@ -21,6 +21,7 @@ from .utils import trace_error_decorator
 from .spider import (
     get_douyu_stream_data, get_bilibili_stream_data
 )
+from .http_clients.async_http import get_response_status
 
 QUALITY_MAPPING = {"OD": 0, "BD": 0, "UHD": 1, "HD": 2, "SD": 3, "LD": 4}
 
@@ -37,7 +38,7 @@ def get_quality_index(quality) -> tuple:
 
 
 @trace_error_decorator
-async def get_douyin_stream_url(json_data: dict, video_quality: str) -> dict:
+async def get_douyin_stream_url(json_data: dict, video_quality: str, proxy_addr: str) -> dict:
     anchor_name = json_data.get('anchor_name')
 
     result = {
@@ -61,6 +62,11 @@ async def get_douyin_stream_url(json_data: dict, video_quality: str) -> dict:
         video_quality, quality_index = get_quality_index(video_quality)
         m3u8_url = m3u8_url_list[quality_index]
         flv_url = flv_url_list[quality_index]
+        ok = await get_response_status(url=m3u8_url, proxy_addr=proxy_addr)
+        if not ok:
+            index = quality_index + 1 if quality_index < 4 else quality_index - 1
+            m3u8_url = m3u8_url_list[index]
+            flv_url = flv_url_list[index]
         result |= {
             'is_live': True,
             'title': json_data['title'],
@@ -73,7 +79,7 @@ async def get_douyin_stream_url(json_data: dict, video_quality: str) -> dict:
 
 
 @trace_error_decorator
-async def get_tiktok_stream_url(json_data: dict, video_quality: str) -> dict:
+async def get_tiktok_stream_url(json_data: dict, video_quality: str, proxy_addr: str) -> dict:
     if not json_data:
         return {"anchor_name": None, "is_live": False}
 
@@ -81,10 +87,18 @@ async def get_tiktok_stream_url(json_data: dict, video_quality: str) -> dict:
         play_list = []
         for key in stream:
             url_info = stream[key]['main']
-            play_url = url_info[q_key]
             sdk_params = url_info['sdk_params']
             sdk_params = json.loads(sdk_params)
             vbitrate = int(sdk_params['vbitrate'])
+            v_codec = sdk_params.get('VCodec', '')
+
+            play_url = ''
+            if url_info.get(q_key):
+                if url_info[q_key].endswith(".flv") or url_info[q_key].endswith(".m3u8"):
+                    play_url = url_info[q_key] + '?codec=' + v_codec
+                else:
+                    play_url = url_info[q_key] + '&codec=' + v_codec
+
             resolution = sdk_params['resolution']
             if vbitrate != 0 and resolution:
                 width, height = map(int, resolution.split('x'))
@@ -115,8 +129,19 @@ async def get_tiktok_stream_url(json_data: dict, video_quality: str) -> dict:
     while len(m3u8_url_list) < 5:
         m3u8_url_list.append(m3u8_url_list[-1])
     video_quality, quality_index = get_quality_index(video_quality)
-    flv_url = flv_url_list[quality_index]['url'].replace("https://", "http://")
-    m3u8_url = m3u8_url_list[quality_index]['url'].replace("https://", "http://")
+    flv_dict: dict = flv_url_list[quality_index]
+    m3u8_dict: dict = m3u8_url_list[quality_index]
+
+    check_url = m3u8_dict.get('url') or flv_dict.get('url')
+    ok = await get_response_status(url=check_url, proxy_addr=proxy_addr, http2=False)
+
+    if not ok:
+        index = quality_index + 1 if quality_index < 4 else quality_index - 1
+        flv_dict: dict = flv_url_list[index]
+        m3u8_dict: dict = m3u8_url_list[index]
+
+    flv_url = flv_dict['url']
+    m3u8_url = m3u8_dict['url']
     result |= {
         'is_live': True,
         'title': live_room['liveRoom']['title'],
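Note: both changes to the stream-URL helpers above follow the same pattern: probe the URL picked for the requested quality and, if the probe fails, step one slot down the quality list (or back up from the lowest). A sketch of that index fallback with a stand-in probe function, since get_response_status lives in the project's own http_clients package and is not reproduced here.

    import asyncio

    async def pick_quality_url(url_list: list[str], quality_index: int, probe) -> str:
        # url_list holds five entries ordered best-to-worst, matching QUALITY_MAPPING.
        url = url_list[quality_index]
        ok = await probe(url)
        if not ok:
            # Fall back one level; from the lowest quality, step back up instead.
            index = quality_index + 1 if quality_index < 4 else quality_index - 1
            url = url_list[index]
        return url

    # Stub probe that pretends the chosen URL is unreachable.
    async def always_down(_url: str) -> bool:
        return False

    urls = [f"https://example.com/stream_{q}.m3u8" for q in ("od", "uhd", "hd", "sd", "ld")]
    print(asyncio.run(pick_quality_url(urls, 2, always_down)))  # falls back to stream_sd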

13 src/utils.py

@@ -10,6 +10,7 @@ import hashlib
 import re
 import traceback
 from typing import Any
+from urllib.parse import parse_qs, urlparse
 from collections import OrderedDict
 import execjs
 from .logger import logger
@@ -191,3 +192,15 @@ def replace_url(file_path: str | Path, old: str, new: str) -> None:
     if old in content:
         with open(file_path, 'w', encoding='utf-8-sig') as f:
             f.write(content.replace(old, new))
+
+
+def get_query_params(url: str, param_name: OptionalStr) -> dict | list[str]:
+    parsed_url = urlparse(url)
+    query_params = parse_qs(parsed_url.query)
+
+    if param_name is None:
+        return query_params
+    else:
+        values = query_params.get(param_name, [])
+        return values
+
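Note: the new get_query_params helper is a thin wrapper around urllib.parse. A quick usage sketch against a sample room URL; the URL is illustrative only, and OptionalStr is replaced by a plain annotation so the snippet is self-contained.

    from urllib.parse import parse_qs, urlparse

    def get_query_params(url: str, param_name: str | None) -> dict | list[str]:
        parsed_url = urlparse(url)
        query_params = parse_qs(parsed_url.query)
        if param_name is None:
            return query_params
        return query_params.get(param_name, [])

    sample = 'https://www.imkktv.com/share?roomId=987654&anchorId=123'
    print(get_query_params(sample, 'roomId'))   # ['987654']
    print(get_query_params(sample, None))       # {'roomId': ['987654'], 'anchorId': ['123']}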