Compare commits

...

120 Commits
v3.0.9 ... main

Author SHA1 Message Date
vain-Liang
add187f8d8
feat: basic support for running with uv (#1265) 2025-11-03 19:55:52 +08:00
ihmily
0333cb4a01 optimize douyin live error catch 2025-10-25 15:17:50 +08:00
ihmily
73857755a7 fix tiktok parse 2025-10-24 18:59:45 +08:00
ihmily
fec734ae74 docs: update readme 2025-10-24 16:04:08 +08:00
ihmily
853d03ea14 fix: update bigo match 2025-10-24 10:53:21 +08:00
ihmily
2fb7f7afd7 feat: add sooplive.com support 2025-10-24 10:43:46 +08:00
ihmily
200e5b5b58 fix douyin stream fetch 2025-10-23 19:55:56 +08:00
Ovear
abb204e6e9
fix: graceful exit when disk usage threshold is reached (#1239) 2025-10-23 19:45:22 +08:00
ihmily
271a53621d fix: update TTinglive 2025-09-01 21:50:06 +08:00
ihmily
d77760f3c9 update readme 2025-09-01 18:21:52 +08:00
ihmily
9c913e23cc update config 2025-09-01 17:48:42 +08:00
ihmily
af37bf28f0 fix: update title fetch 2025-09-01 16:52:56 +08:00
ihmily
d4796409c7 feat: add direct downloader 2025-09-01 16:29:13 +08:00
ihmily
93a12ab41d feat: add direct downloader 2025-09-01 16:10:16 +08:00
ihmily
525b720627 fix live audio record 2025-08-28 18:05:11 +08:00
Hmily
e9f2a55ceb
feat: add laixiu sign js (#1195) 2025-08-28 17:08:15 +08:00
Hmily
3965487746
fix bigo room_id parse and twitcasting login (#1194) 2025-08-28 17:03:10 +08:00
Hmily
e80f1e653a
fix: update liveme room id match 2025-08-27 18:15:45 +08:00
Hmily
63787f1743
fix: update liveme room id match (#1192) 2025-08-27 18:10:28 +08:00
Hmily
a328c6a1c5
fix douyin stream fetch (#1190) 2025-08-27 13:55:22 +08:00
Hmily
be2c3a393f
fix: update flextv endpoint address (#1185) 2025-08-22 18:36:48 +08:00
Hmily
c7e3cf47ce
fix: update twitcast live parse (#1177) 2025-08-12 16:11:36 +08:00
Hmily
199186fb09
fix: update weibo live parse 2025-08-05 19:03:45 +08:00
Hmily
5778ebc4b3
fix: update migu live parse (#1165) 2025-08-02 19:41:24 +08:00
COYG⚡️
d9f985303a
Update LICENSE (#1160) 2025-07-30 15:46:12 +08:00
逆行时光
110d5bded4
feat: add push tunnel: pushplus (#1156) 2025-07-30 09:32:01 +08:00
ihmily
e478d72e62 fix: optimize douyin app short link parse 2025-07-24 16:53:07 +08:00
Hmily
bcfc268c1c
feat: add picarto live (#1149) 2025-07-22 11:51:44 +08:00
ihmily
8e4e9b098f feat: add vcodec flag for tiktok and douyin 2025-07-19 19:06:27 +08:00
ihmily
9f499f3fa6 feat: add lianjie and laixiu live 2025-07-19 17:45:19 +08:00
ihmily
ae8200e01c feat: add migu live record 2025-07-04 17:28:44 +08:00
ihmily
952eeb9b7c fix: handle twitch live record ad issues 2025-06-30 21:53:23 +08:00
727155455
ba8979e4ee
fix script input parameters (#1104) 2025-06-16 20:20:17 +08:00
ihmily
c157c08e5a style: fix lint issues 2025-06-14 14:27:55 +08:00
ihmily
effcfcc76f fix: update bilibili live data fetch 2025-06-13 19:00:31 +08:00
Hmily
d787838ed2
up 2025-06-11 13:36:36 +08:00
Hmily
86a822f0db
Update build-image.yml 2025-06-06 18:58:39 +08:00
ihmily
51f79d70f8 fix: update 2025-06-05 16:06:12 +08:00
ihmily
d37d9f25ad fix: update maoerfm live record 2025-06-05 14:19:53 +08:00
ihmily
c40a10235d fix: update blued stream fetch 2025-06-05 11:58:08 +08:00
ihmily
d860023808 fix: update bilibili live room data fetch 2025-06-05 11:41:52 +08:00
ihmily
57739410f8 fix: update taobao live stream fetch 2025-06-04 18:20:30 +08:00
ihmily
9bc629c1b0 fix: update douyin profile link parse 2025-06-04 15:38:16 +08:00
ihmily
2dd9c42767 fix: update douyin profile link parse 2025-06-04 15:31:59 +08:00
ihmily
fd06bc89da fix: update popkontv live stream fetch 2025-05-30 18:55:47 +08:00
ihmily
019b30203e fix: update rednote live stream fetch 2025-05-30 18:40:51 +08:00
ihmily
0bd2a3a360 refactor: Refactor code structure 2025-05-30 17:12:59 +08:00
某时橙
4fa3fbb773
fix: print flush (#957) 2025-03-20 18:50:01 +08:00
咳咳
ba046660fc
fix: huya prioritizes using TX CDN (#993) 2025-03-20 16:27:18 +08:00
ihmily
f73ce7b68e fix: update huya live record 2025-02-08 19:20:09 +08:00
ihmily
9e494f4377 fix: update flextv live stream fetch 2025-02-08 18:00:52 +08:00
ihmily
151a6a45c9 fix: fix force https record 2025-02-06 18:41:06 +08:00
ihmily
807f4b758e fix: update netease cc live record 2025-02-06 02:37:06 +08:00
ihmily
bf7381bf6c refactor: convert asynchronously func and add readme_pypi 2025-02-05 22:44:21 +08:00
ihmily
d055602e81 refactor: update quality map 2025-02-05 21:34:32 +08:00
ihmily
ef97e01dba refactor: update quality map 2025-02-05 20:52:36 +08:00
ihmily
e189640d3a refactor: update quality map 2025-02-05 20:31:43 +08:00
ihmily
7bb778a875 refactor: rename package from douyinliverecorder to streamget 2025-02-05 11:56:16 +08:00
ihmily
99ea143c78 chore: update httpx dependencies 2025-02-04 07:03:34 +08:00
ihmily
52c0287150 perf: add http2 tcp request 2025-02-04 06:54:14 +08:00
ihmily
246632de78 refactor: optimize code structure 2025-02-04 06:39:58 +08:00
ihmily
71fbf93ffe fix: update request proxy 2025-01-27 22:14:39 +08:00
ihmily
a61bdc852e docs: update readme 2025-01-27 15:40:26 +08:00
ihmily
a5d567092d fix: update huya live parse 2025-01-27 14:03:35 +08:00
ihmily
272e2dd28e fix: update httpx request proxy params 2025-01-27 13:20:21 +08:00
ihmily
c9b2310fa4 fix: update ffmpeg install 2025-01-26 00:07:30 +08:00
ihmily
46842c3b48 fix 2025-01-25 19:52:43 +08:00
ihmily
736943cbca perf: add mp4 video recode h264 and update ffmpeg version 2025-01-25 19:46:37 +08:00
ihmily
5d3c295c7a fix 2025-01-25 16:19:42 +08:00
ihmily
8c30787353 feat: add smtp port and smtp ssl config 2025-01-25 15:03:23 +08:00
ihmily
73d9ee1334 fix: update vvxq live record 2025-01-25 14:25:43 +08:00
ihmily
2fa830307f feat: add faceit live record 2025-01-25 13:41:50 +08:00
ihmily
bbb0c5ebaa chore: update dependencies to latest versions 2025-01-24 19:29:22 +08:00
ihmily
fa92a4196f fix: update xiaohongshu stream fetch 2025-01-24 12:22:35 +08:00
ihmily
29a16ba00b fix: update flextv live record 2025-01-23 23:20:16 +08:00
ihmily
d5de8bd77b fix: update xiaohongshu live record 2025-01-23 22:27:03 +08:00
ihmily
0751107ae1 refactor: convert project to asynchronous operations 2025-01-23 20:41:46 +08:00
ihmily
c637e5617c chore: update gitgnore 2025-01-23 17:26:36 +08:00
ihmily
a00305cafb fix: update wechat message push 2025-01-23 17:16:59 +08:00
ihmily
c9f26b116d fix: update xhs stream url rule 2025-01-23 17:14:20 +08:00
ihmily
4181e9745a feat: add taobao sign js 2025-01-23 16:00:33 +08:00
justdoiting
1a8f2fe9bd
fix: update changliao live room url rule (#861) 2025-01-23 15:41:06 +08:00
ihmily
0c97d0612f feat: add jd live record 2024-12-03 19:20:04 +08:00
ihmily
80f88f6420 fix: add mp4 converts error handle 2024-12-02 19:57:25 +08:00
ihmily
9835374174 feat: add taobao live record 2024-12-02 17:51:24 +08:00
ihmily
2ca2fdf627 docs: update v4.0.1 2024-11-30 23:34:59 +08:00
ihmily
35bfca0a7a docs: update v4.0.1 2024-11-30 23:07:43 +08:00
ihmily
4b692c0f77 docs: update readme 2024-11-30 21:37:55 +08:00
ihmily
7a07a5282d feat: add youtube live recprd 2024-11-30 21:27:20 +08:00
Hmily
2729fcec46
update ffmpeg_install.py 2024-11-30 15:03:29 +08:00
ihmily
eeeed2fc5f refactor: add ffmpeg automatic installation 2024-11-29 19:59:11 +08:00
ihmily
b963d8ac6f refactor: simplify code structure and reduce redundancy 2024-11-29 15:29:14 +08:00
Vincent Qiu (HOHO``)
7cbd995591
fix: kuaishou json response changed (#808) 2024-11-29 11:22:46 +08:00
Horatio
b3449093bb
feat: add dingtalk atall (#795) 2024-11-21 13:41:42 +08:00
ihmily
14552afcdf perf: add system proxy check 2024-11-19 15:40:45 +08:00
ihmily
645f2ea782 perf: add stream link handle 2024-11-18 21:00:43 +08:00
ihmily
38b423504d fix: update huajiao live record 2024-11-18 16:10:46 +08:00
ihmily
3db010315f fix: update xhs and shopee live record 2024-11-16 18:51:25 +08:00
ihmily
ad0b6c72dd fix: update xhs live record 2024-11-16 04:31:31 +08:00
ihmily
463fa44756 fix: update xhs live record 2024-11-16 04:09:14 +08:00
ihmily
6657374db3 fix: update shopee live record 2024-11-15 19:06:53 +08:00
ihmily
9d292cf865 fix: update shopee live record 2024-11-15 11:51:00 +08:00
ihmily
310390b3c2 fix: delete duplicate live url 2024-11-14 11:16:09 +08:00
ihmily
d2d34ceae7 fix: update host parse 2024-11-14 11:01:15 +08:00
ihmily
4de6bae1fa feat: add custom script execution 2024-11-14 00:46:43 +08:00
ihmily
0bf1550884 fix: update bash script execution 2024-11-13 19:45:31 +08:00
ihmily
461489290a fix: update user-agent for yy live record 2024-11-12 17:50:08 +08:00
ihmily
92f5fbf4b8 fix: change user-agent for bilibili-h5 2024-11-09 15:38:35 +08:00
ihmily
b3f110e3ec feat: add shopee live record 2024-11-09 03:21:14 +08:00
ihmily
2e6e699fdb docs: update readme 2024-11-08 19:27:51 +08:00
ihmily
a95e0a8849 feat: add internationalization support 2024-11-08 19:16:00 +08:00
ihmily
240f0e2400 fix: Simplify backup file logic and remove redundant code blocks 2024-11-08 14:00:09 +08:00
ihmily
37f7ad0048 feat: add stop recording space threshold configuration 2024-11-07 01:49:22 +08:00
ihmily
3e2772a3d7 feat: add stop recording space threshold configuration 2024-11-07 01:25:42 +08:00
ihmily
826a3a37e7 feat: add normal hls and flv url record 2024-11-06 23:10:50 +08:00
Hmily
ce6285c1b9
docs: update readme 2024-11-06 00:30:11 +08:00
ihmily
4946b49e95 docs: update readme 2024-11-04 22:16:46 +08:00
ihmily
f03f39ca85 fix: update log segment config 2024-11-04 22:12:17 +08:00
ihmily
14e4c530a2 fix: add vvxqiu anchor name fetch method 2024-11-04 20:46:31 +08:00
ihmily
19fb6a2b20 fix: add default nickname for blank nickname 2024-10-30 11:03:32 +08:00
39 changed files with 4408 additions and 1775 deletions

View File

@ -4,45 +4,51 @@ on:
push:
tags:
- '*'
workflow_dispatch:
inputs:
tag_name:
description: 'Tag name for the Docker image'
required: false
default: 'latest'
jobs:
build_and_push:
runs-on: ubuntu-latest
steps:
- name: Checkout code
uses: actions/checkout@v2
- name: Checkout code
uses: actions/checkout@v2
- name: Set up QEMU
uses: docker/setup-qemu-action@v3
- name: Set up QEMU
uses: docker/setup-qemu-action@v3
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
- name: Cache Docker layers
uses: actions/cache@v2
with:
path: /tmp/.buildx-cache
key: ${{ runner.os }}-buildx-${{ github.sha }}
restore-keys: |
${{ runner.os }}-buildx-
- name: Cache Docker layers
uses: actions/cache@v3
with:
path: /tmp/.buildx-cache
key: ${{ runner.os }}-buildx-${{ github.sha }}
restore-keys: |
${{ runner.os }}-buildx-
- name: Log in to Docker Hub
uses: docker/login-action@v2
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_PASSWORD }}
registry: docker.io
- name: Log in to Docker Hub
uses: docker/login-action@v2
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_PASSWORD }}
registry: docker.io
- name: Build and push Docker image
uses: docker/build-push-action@v5
with:
context: .
file: ./Dockerfile
push: true
tags: |
ihmily/douyin-live-recorder:${{ github.ref_name }}
ihmily/douyin-live-recorder:latest
platforms: linux/amd64,linux/arm64
cache-from: type=local,src=/tmp/.buildx-cache
cache-to: type=local,dest=/tmp/.buildx-cache
- name: Build and push Docker image
uses: docker/build-push-action@v5
with:
context: .
file: ./Dockerfile
push: true
tags: |
ihmily/douyin-live-recorder:${{ github.event.inputs.tag_name || github.ref_name }}
ihmily/douyin-live-recorder:latest
platforms: linux/amd64,linux/arm64
cache-from: type=local,src=/tmp/.buildx-cache
cache-to: type=local,dest=/tmp/.buildx-cache

9
.gitignore vendored
View File

@ -52,7 +52,6 @@ coverage.xml
cover/
# Translations
*.mo
*.pot
# Django stuff:
@ -82,10 +81,16 @@ target/
profile_default/
ipython_config.py
# DouyinLiveRecord
backup_config/
logs/
node/
node-v*.zip
# pyenv
# For a library or package, you might want to ignore these files since the code is
# intended to run in multiple environments; otherwise, check them in:
# .python-version
.python-version
# pipenv
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.

View File

@ -1,6 +1,6 @@
MIT License
Copyright (c) 2023 Hmily
Copyright (c) 2025 Hmily
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal

249
README.md
View File

@ -31,7 +31,7 @@
- [x] 猫耳FM
- [x] Look直播
- [x] WinkTV
- [x] FlexTV
- [x] TTingLive(原Flextv)
- [x] PopkonTV
- [x] TwitCasting
- [x] 百度直播
@ -56,6 +56,15 @@
- [x] 六间房直播
- [x] 乐嗨直播
- [x] 花猫直播
- [x] Shopee
- [x] Youtube
- [x] 淘宝
- [x] 京东
- [x] Faceit
- [x] 咪咕
- [x] 连接直播
- [x] 来秀直播
- [x] Picarto
- [ ] 更多平台正在更新中
</div>
@ -75,8 +84,10 @@
├── utils.py -> (contains utility functions)
├── logger.py -> (logger handle)
├── room.py -> (get room info)
├── ab_sign.py-> (generate dy token)
├── /javascript -> (some decrypt code)
├── main.py -> (main file)
├── ffmpeg_install.py -> (ffmpeg install script)
├── demo.py -> (call package test demo)
├── msg_push.py -> (send live status update message)
├── ffmpeg.exe -> (record video)
@ -113,55 +124,53 @@
直播间链接示例:
```
抖音
抖音:
https://live.douyin.com/745964462470
https://v.douyin.com/iQFeBnt/
https://live.douyin.com/yall1102 (链接+抖音号)
https://v.douyin.com/CeiU5cbX (主播主页地址)
TikTok
TikTok:
https://www.tiktok.com/@pearlgaga88/live
快手
快手:
https://live.kuaishou.com/u/yall1102
虎牙
虎牙:
https://www.huya.com/52333
斗鱼
斗鱼:
https://www.douyu.com/3637778?dyshid=
https://www.douyu.com/topic/wzDBLS6?rid=4921614&dyshid=
YY:
https://www.yy.com/22490906/22490906
B站
B站:
https://live.bilibili.com/320
小红书
小红书(直播间分享地址):
http://xhslink.com/xpJpfM
https://www.xiaohongshu.com/hina/livestream/569077534207413574/1707413727088?appuid=5f3f478a00000000010005b3&
bigo直播
bigo直播:
https://www.bigo.tv/cn/716418802
buled直播
blued直播:
https://app.blued.cn/live?id=Mp6G2R
SOOP[AfreecaTV]
SOOP:
https://play.sooplive.co.kr/sw7love
https://play.afreecatv.com/sw7love
网易cc
网易cc:
https://cc.163.com/583946984
千度热播
千度热播:
https://qiandurebo.com/web/video.php?roomnumber=33333
PandaTV
PandaTV:
https://www.pandalive.co.kr/live/play/bara0109
猫耳FM
猫耳FM:
https://fm.missevan.com/live/868895007
Look直播:
@ -170,7 +179,7 @@ https://look.163.com/live?id=65108820&position=3
WinkTV:
https://www.winktv.co.kr/live/play/anjer1004
FlexTV:
FlexTV(TTinglive):
https://www.flextv.co.kr/channels/593127/live
PopkonTV:
@ -207,10 +216,10 @@ https://www.showroom-live.com/room/profile?room_id=480206 (主播主页地址
Acfun:
https://live.acfun.cn/live/179922
映客直播
映客直播:
https://www.inke.cn/liveroom/index.html?uid=22954469&id=1720860391070904
音播直播
音播直播:
https://live.ybw1666.com/800002949
知乎直播:
@ -232,7 +241,7 @@ https://17.live/en/live/6302408
https://www.lang.live/en-US/room/3349463
畅聊直播:
https://www.tlclw.com/801044397
https://live.tlclw.com/106188
飘飘直播:
https://m.pp.weimipopo.com/live/preview.html?uid=91648673&anchorUid=91625862&app=plpl
@ -245,12 +254,40 @@ https://www.lehaitv.com/8059096
花猫直播:
https://h.catshow168.com/live/preview.html?uid=19066357&anchorUid=18895331
Shopee:
https://sg.shp.ee/GmpXeuf?uid=1006401066&session=802458
Youtube:
https://www.youtube.com/watch?v=cS6zS5hi1w0
淘宝(需cookie):
https://tbzb.taobao.com/live?liveId=532359023188
https://m.tb.cn/h.TWp0HTd
京东:
https://3.cn/28MLBy-E
Faceit:
https://www.faceit.com/zh/players/Compl1/stream
连接直播:
https://show.lailianjie.com/10000258
咪咕直播:
https://www.miguvideo.com/p/live/120000541321
来秀直播:
https://www.imkktv.com/h5/share/video.html?uid=1845195&roomId=1710496
Picarto:
https://www.picarto.tv/cuteavalanche
```
&emsp;
## 🎃源码运行
使用源码运行,前提要有**Python>=3.10**环境如果没有请先自行安装Python再执行下面步骤。
使用源码运行,可参考下面的步骤。
1.首先拉取或手动下载本仓库项目代码
@ -262,9 +299,94 @@ git clone https://github.com/ihmily/DouyinLiveRecorder.git
```bash
cd DouyinLiveRecorder
pip3 install -r requirements.txt
```
> [!TIP]
> - 不论你是否已安装 **Python>=3.10** 环境, 都推荐使用 [**uv**](https://github.com/astral-sh/uv) 运行, 因为它可以自动管理虚拟环境和方便地管理 **Python** 版本, **不过这完全是可选的**<br />
> 使用以下命令安装
> ```bash
> # 在 macOS 和 Linux 上安装 uv
> curl -LsSf https://astral.sh/uv/install.sh | sh
> ```
> ```powershell
> # 在 Windows 上安装 uv
> powershell -ExecutionPolicy ByPass -c "irm https://astral.sh/uv/install.ps1 | iex"
> ```
> - 如果安装依赖速度太慢, 你可以考虑使用国内 pip 镜像源:<br />
> 在 `pip` 命令使用 `-i` 参数指定, 如 `pip3 install -r requirements.txt -i https://pypi.tuna.tsinghua.edu.cn/simple`<br />
> 或者在 `uv` 命令 `--index` 选项指定, 如 `uv sync --index https://pypi.tuna.tsinghua.edu.cn/simple`
<details>
<summary>如果已安装 <b>Python>=3.10</b> 环境</summary>
- :white_check_mark: 在虚拟环境中安装 (推荐)
1. 创建虚拟环境
- 使用系统已安装的 Python, 不使用 uv
```bash
python -m venv .venv
```
- 使用 uv, 默认使用系统 Python, 你可以添加 `--python` 选项指定 Python 版本而不使用系统 Python [uv官方文档](https://docs.astral.sh/uv/concepts/python-versions/)
```bash
uv venv
```
2. 在终端激活虚拟环境 (在未安装 uv 或你想要手动激活虚拟环境时执行, 若已安装 uv, 可以跳过这一步, uv 会自动激活并使用虚拟环境)
**Bash**
```bash
source .venv/Scripts/activate
```
**Powershell**
```powershell
.venv\Scripts\activate.ps1
```
**Windows CMD**
```bat
.venv\Scripts\activate.bat
```
3. 安装依赖
```bash
# 使用 pip (若安装太慢或失败, 可使用 `-i` 指定镜像源)
pip3 install -U pip && pip3 install -r requirements.txt
# 或者使用 uv (可使用 `--index` 指定镜像源)
uv sync
# 或者
uv pip sync requirements.txt
```
- :x: 在系统 Python 环境中安装 (不推荐)
```bash
pip3 install -U pip && pip3 install -r requirements.txt
```
</details>
<details>
<summary>如果未安装 <b>Python>=3.10</b> 环境</summary>
你可以使用 [**uv**](https://github.com/astral-sh/uv) 安装依赖
```bash
# uv 将使用 3.10 及以上的最新 python 发行版自动创建并使用虚拟环境, 可使用 --python 选项指定 python 版本, 参见 https://docs.astral.sh/uv/reference/cli/#uv-sync--python 和 https://docs.astral.sh/uv/reference/cli/#uv-pip-sync--python
uv sync
# 或
uv pip sync requirements.txt
```
</details>
3.安装[FFmpeg](https://ffmpeg.org/download.html#build-linux)如果是Windows系统这一步可跳过。对于Linux系统执行以下命令安装
CentOS执行
@ -297,6 +419,12 @@ brew install ffmpeg
```python
python main.py
```
```bash
uv run main.py
```
其中Linux系统请使用`python3 main.py` 运行。
@ -320,7 +448,7 @@ docker-compose up
2.构建镜像(可选)
如果你只想简单的运行程序,则不需要做这一步。要自定义本地构建,可以修改 [docker-compose.yaml](https://github.com/ihmily/DouyinLiveRecorder/blob/main/docker-compose.yaml) 文件,如将镜像名修改为 `douyin-live-recorder:latest`,并取消 `# build: .` 注释,然后再执行
如果你只想简单的运行程序,则不需要做这一步。Docker镜像仓库中代码版本可能不是最新的如果要运行本仓库主分支最新代码可以本地自定义构建通过修改 [docker-compose.yaml](https://github.com/ihmily/DouyinLiveRecorder/blob/main/docker-compose.yaml) 文件,如将镜像名修改为 `douyin-live-recorder:latest`,并取消 `# build: .` 注释,然后再执行
```bash
docker build -t douyin-live-recorder:latest .
@ -353,6 +481,13 @@ docker-compose stop
&emsp;
## 🤖相关项目
- StreamCap: https://github.com/ihmily/StreamCap
- streamget: https://github.com/ihmily/streamget
&emsp;
## ❤️贡献者
&ensp;&ensp; [![Hmily](https://github.com/ihmily.png?size=50)](https://github.com/ihmily)
@ -369,10 +504,41 @@ docker-compose stop
[![wujiyu115](https://github.com/wujiyu115.png?size=50)](https://github.com/wujiyu115)
[![zhanghao333](https://github.com/zhanghao333.png?size=50)](https://github.com/zhanghao333)
<a href="https://github.com/gyc0123" target="_blank"><img src="https://github.com/gyc0123.png?size=50" alt="gyc0123" style="width:53px; height:51px;" /></a>
&ensp;&ensp; [![HoratioShaw](https://github.com/HoratioShaw.png?size=50)](https://github.com/HoratioShaw)
[![nov30th](https://github.com/nov30th.png?size=50)](https://github.com/nov30th)
[![727155455](https://github.com/727155455.png?size=50)](https://github.com/727155455)
[![nixingshiguang](https://github.com/nixingshiguang.png?size=50)](https://github.com/nixingshiguang)
[![1411430556](https://github.com/1411430556.png?size=50)](https://github.com/1411430556)
[![Ovear](https://github.com/Ovear.png?size=50)](https://github.com/Ovear)
&emsp;
## ⏳提交日志
- 20251024
- 修复抖音风控无法获取数据问题
- 新增soop.com录制支持
- 修复bigo录制
- 20250127
- 新增淘宝、京东、faceit直播录制
- 修复小红书直播流录制以及转码问题
- 修复畅聊、VV星球、flexTV直播录制
- 修复批量微信直播推送
- 新增email发送ssl和port配置
- 新增强制转h264配置
- 更新ffmpeg版本
- 重构包为异步函数!
- 20241130
- 新增shopee、youtube直播录制
- 新增支持自定义m3u8、flv地址录制
- 新增自定义执行脚本支持python、bat、bash等
- 修复YY直播、花椒直播和小红书直播录制
- 修复b站标题获取错误
- 修复log日志错误
- 20241030
- 新增嗨秀直播、vv星球直播、17Live、浪Live、SOOP、畅聊直播(原时光直播)、飘飘直播、六间房直播、乐嗨直播、花猫直播等10个平台直播录制
- 修复小红书直播录制,支持小红书作者主页地址录制直播
@ -396,7 +562,6 @@ docker-compose stop
- 新增时光直播录制
- 20240701
- 修复虎牙直播录制2分钟断流问题
- 新增自定义直播推送内容
- 20240621
- 新增Acfun、ShowRoom直播录制
@ -409,13 +574,10 @@ docker-compose stop
- 修复部分虎牙直播间录制错误
- 20240508
- 修复花椒直播录制
- 更改文件路径解析方式 [@kaine1973](https://github.com/kaine1973)
- 20240506
- 修复抖音录制画质解析bug
- 修复虎牙录制 60帧最高画质问题
- 新增流星直播录制
- 20240427
- 新增LiveMe、花椒直播录制
@ -425,13 +587,10 @@ docker-compose stop
- 新增酷狗直播录制、优化PopkonTV直播录制
- 20240423
- 新增百度直播录制、微博直播录制
- 修复斗鱼录制直播回放的问题
- 新增直播源地址显示以及输出到日志文件设置
- 20240311
- 修复海外平台录制bug增加画质选择增强录制稳定性
- 修复虎牙录制bug (虎牙`一起看`频道 有特殊限制,有时无法录制)
- 20240309
- 修复虎牙直播、小红书直播和B站直播录制
@ -445,37 +604,29 @@ docker-compose stop
- 修复了小红书直播因官方更新直播域名,导致无法录制直播的问题
- 修复了更新URL配置文件的bug
- 最后,祝大家新年快乐!
<details><summary>点击展开更多提交日志</summary>
- 20240129
- 新增猫耳FM直播录制
- 20240127
- 新增千度热播直播录制、新增pandaTV(韩国)直播录制
- 新增telegram直播状态消息推送修复了某些bug
- 新增自定义设置不同直播间的录制画质(即每个直播间录制画质可不同)
- 修改录制视频保存路径为 `downloads` 文件夹,并且分平台进行保存。
- 20240114
- 新增网易cc直播录制优化ffmpeg参数修改AfreecaTV输入直播地址格式
- 修改日志记录器 @[iridescentGray](https://github.com/iridescentGray)
- 20240102
- 修复Linux上运行新增docker配置文件
- 20231210
- 修复录制分段bug修复bigo录制检测bug
- 新增自定义修改录制主播名
- 新增AfreecaTV直播录制修复某些可能会发生的bug
- 20231207
- 新增blued直播录制修复YY直播录制新增直播结束消息推送
- 20231206
- 新增bigo直播录制
- 20231203
- 新增小红书直播录制(全网首发),目前小红书官方没有切换清晰度功能,因此直播录制也只有默认画质
- 小红书录制暂时无法循环监测,每次主播开启直播,都要重新获取一次链接
@ -485,18 +636,14 @@ docker-compose stop
- 欢迎各位大佬提pr 帮忙更新维护
- 20230930
- 新增抖音从接口获取直播流,增强稳定性
- 修改快手获取直播流的方式,改用从官方接口获取
- 祝大家中秋节快乐!
- 20230919
- 修复了快手版本更新后录制出错的问题增加了其自动获取cookie(~~稳定性未知~~)
- 修复了TikTok显示正在直播但不进行录制的问题
- 20230907
- 修复了因抖音官方更新了版本导致的录制出错以及短链接转换出错
- 修复B站无法录制原画视频的bug
- 修改了配置文件字段新增各平台自定义设置Cookie
- 20230903
- 修复了TikTok录制时报644无法录制的问题
@ -513,11 +660,9 @@ docker-compose stop
- 修复主播重新开播无法再次录制的问题
- 20230807
- 新增了斗鱼直播录制
- 修复显示录制完成之后会重新开始录制的问题
- 20230805
- 新增了虎牙直播录制其暂时只能用flv视频流进行录制
- Web API 新增了快手和虎牙这两个平台的直播流解析TikTok要代理
- 20230804
- 新增了快手直播录制,优化了部分代码
@ -527,10 +672,8 @@ docker-compose stop
- 新增了国际版抖音TikTok的直播录制去除冗余 简化了部分代码
- 20230724
- 新增了一个通过抖音直播间地址获取直播视频流链接的API接口上传即可用
</details>
&emsp;
&emsp;
## 有问题可以提issue ,后续我会在这里不断更新其他直播平台的录制 欢迎Star
## 有问题可以提issue, 我会在这里持续添加更多直播平台的录制 欢迎Star
####

View File

@ -1,13 +1,15 @@
[录制设置]
是否跳过代理检测(是/否) =
直播保存路径(不填则默认) =
language(zh_cn/en) = zh_cn
是否跳过代理检测(是/否) =
直播保存路径(不填则默认) =
保存文件夹是否以作者区分 =
保存文件夹是否以时间区分 =
保存文件夹是否以标题区分 =
保存文件名是否包含标题 =
是否去除名称中的表情符号 =
视频保存格式ts|mkv|flv|mp4|mp3音频|m4a音频 = ts
原画|超清|高清|标清|流畅 = 原画
是否使用代理ip(是/否) =
是否使用代理ip(是/否) =
代理地址 =
同一时间访问网络的线程数 = 3
循环时间(秒) = 300
@ -15,98 +17,115 @@
是否显示循环秒数 =
是否显示直播源地址 =
分段录制是否开启 =
是否强制启用https录制 =
录制空间剩余阈值(gb) = 1.0
视频分段时间(秒) = 1800
ts录制完成后自动转为mp4格式 =
录制完成后自动转为mp4格式 =
mp4格式重新编码为h264 =
追加格式后删除原文件 =
生成时间字幕文件 =
是否录制完成后执行bash脚本 =
bash脚本路径 =
使用代理录制的平台(逗号分隔) = tiktok, afreecatv, soop, pandalive, winktv, flextv, popkontv, twitch, liveme, showroom, chzzk
额外使用代理录制的平台(逗号分隔) =
是否录制完成后执行自定义脚本 =
自定义脚本执行命令 =
使用代理录制的平台(逗号分隔) = tiktok, sooplive, pandalive, winktv, flextv, popkontv, twitch, liveme, showroom, chzzk, shopee, shp, youtu
额外使用代理录制的平台(逗号分隔) =
[推送配置]
# 可选微信|钉钉|tg|邮箱|bark|ntfy 可填多个
直播状态推送渠道 =
钉钉推送接口链接 =
微信推送接口链接 =
bark推送接口链接 =
# 可选微信|钉钉|tg|邮箱|bark|ntfy|pushplus 可填多个
直播状态推送渠道 =
钉钉推送接口链接 =
微信推送接口链接 =
bark推送接口链接 =
bark推送中断级别 = active
bark推送铃声 =
钉钉通知@对象(填手机号) =
tgapi令牌 =
tg聊天id(个人或者群组id) =
smtp邮件服务器 =
邮箱登录账号 =
发件人密码(授权码) =
发件人邮箱 =
发件人显示昵称 =
收件人邮箱 =
bark推送铃声 =
钉钉通知@对象(填手机号) =
钉钉通知@全体(是/否) =
tgapi令牌 =
tg聊天id(个人或者群组id) =
smtp邮件服务器 =
是否使用SMTP服务SSL加密(是/否) =
SMTP邮件服务器端口 =
邮箱登录账号 =
发件人密码(授权码) =
发件人邮箱 =
发件人显示昵称 =
收件人邮箱 =
ntfy推送地址 = https://ntfy.sh/xxxx
ntfy推送标签 = tada
ntfy推送邮箱 =
自定义推送标题 =
自定义开播推送内容 =
自定义关播推送内容 =
只推送通知不录制(是/否) =
直播推送检测频率(秒) = 1800
开播推送开启(是/否)=
关播推送开启(是/否)=
ntfy推送邮箱 =
pushplus推送token =
自定义推送标题 =
自定义开播推送内容 =
自定义关播推送内容 =
只推送通知不录制(是/否) =
直播推送检测频率(秒) = 1800
开播推送开启(是/否) =
关播推送开启(是/否) =
[Cookie]
# 录制抖音必填
抖音cookie = ttwid=1%7CB1qls3GdnZhUov9o2NxOMxxYS2ff6OSvEWbv0ytbES4%7C1680522049%7C280d802d6d478e3e78d0c807f7c487e7ffec0ae4e5fdd6a0fe74c3c6af149511; my_rd=1; passport_csrf_token=3ab34460fa656183fccfb904b16ff742; passport_csrf_token_default=3ab34460fa656183fccfb904b16ff742; d_ticket=9f562383ac0547d0b561904513229d76c9c21; n_mh=hvnJEQ4Q5eiH74-84kTFUyv4VK8xtSrpRZG1AhCeFNI; store-region=cn-fj; store-region-src=uid; LOGIN_STATUS=1; __security_server_data_status=1; FORCE_LOGIN=%7B%22videoConsumedRemainSeconds%22%3A180%7D; pwa2=%223%7C0%7C3%7C0%22; download_guide=%223%2F20230729%2F0%22; volume_info=%7B%22isUserMute%22%3Afalse%2C%22isMute%22%3Afalse%2C%22volume%22%3A0.6%7D; strategyABtestKey=%221690824679.923%22; stream_recommend_feed_params=%22%7B%5C%22cookie_enabled%5C%22%3Atrue%2C%5C%22screen_width%5C%22%3A1536%2C%5C%22screen_height%5C%22%3A864%2C%5C%22browser_online%5C%22%3Atrue%2C%5C%22cpu_core_num%5C%22%3A8%2C%5C%22device_memory%5C%22%3A8%2C%5C%22downlink%5C%22%3A10%2C%5C%22effective_type%5C%22%3A%5C%224g%5C%22%2C%5C%22round_trip_time%5C%22%3A150%7D%22; VIDEO_FILTER_MEMO_SELECT=%7B%22expireTime%22%3A1691443863751%2C%22type%22%3Anull%7D; home_can_add_dy_2_desktop=%221%22; __live_version__=%221.1.1.2169%22; device_web_cpu_core=8; device_web_memory_size=8; xgplayer_user_id=346045893336; csrf_session_id=2e00356b5cd8544d17a0e66484946f28; odin_tt=724eb4dd23bc6ffaed9a1571ac4c757ef597768a70c75fef695b95845b7ffcd8b1524278c2ac31c2587996d058e03414595f0a4e856c53bd0d5e5f56dc6d82e24004dc77773e6b83ced6f80f1bb70627; __ac_nonce=064caded4009deafd8b89; __ac_signature=_02B4Z6wo00f01HLUuwwAAIDBh6tRkVLvBQBy9L-AAHiHf7; ttcid=2e9619ebbb8449eaa3d5a42d8ce88ec835; webcast_leading_last_show_time=1691016922379; webcast_leading_total_show_times=1; webcast_local_quality=sd; live_can_add_dy_2_desktop=%221%22; msToken=1JDHnVPw_9yTvzIrwb7cQj8dCMNOoesXbA_IooV8cezcOdpe4pzusZE7NB7tZn9TBXPr0ylxmv-KMs5rqbNUBHP4P7VBFUu0ZAht_BEylqrLpzgt3y5ne_38hXDOX8o=; 
msToken=jV_yeN1IQKUd9PlNtpL7k5vthGKcHo0dEh_QPUQhr8G3cuYv-Jbb4NnIxGDmhVOkZOCSihNpA2kvYtHiTW25XNNX_yrsv5FN8O6zm3qmCIXcEe0LywLn7oBO2gITEeg=; tt_scid=mYfqpfbDjqXrIGJuQ7q-DlQJfUSG51qG.KUdzztuGP83OjuVLXnQHjsz-BRHRJu4e986
快手cookie =
tiktok_cookie =
虎牙cookie =
斗鱼cookie =
yy_cookie =
b站cookie =
小红书cookie =
bigo_cookie =
blued_cookie =
afreecatv_cookie =
netease_cookie =
千度热播_cookie =
pandatv_cookie =
猫耳fm_cookie =
winktv_cookie =
flextv_cookie =
look_cookie =
twitcasting_cookie =
baidu_cookie =
weibo_cookie =
kugou_cookie =
twitch_cookie =
liveme_cookie =
huajiao_cookie =
liuxing_cookie =
showroom_cookie =
acfun_cookie =
changliao_cookie =
快手cookie =
tiktok_cookie =
虎牙cookie =
斗鱼cookie =
yy_cookie =
b站cookie =
小红书cookie =
bigo_cookie =
blued_cookie =
sooplive_cookie =
netease_cookie =
千度热播_cookie =
pandatv_cookie =
猫耳fm_cookie =
winktv_cookie =
flextv_cookie =
look_cookie =
twitcasting_cookie =
baidu_cookie =
weibo_cookie =
kugou_cookie =
twitch_cookie =
liveme_cookie =
huajiao_cookie =
liuxing_cookie =
showroom_cookie =
acfun_cookie =
changliao_cookie =
yinbo_cookie =
yingke_cookie =
zhihu_cookie =
chzzk_cookie =
haixiu_cookie =
vvxqiu_cookie =
17live_cookie =
langlive_cookie =
pplive_cookie =
6room_cookie =
lehaitv_cookie =
huamao_cookie =
yingke_cookie =
zhihu_cookie =
chzzk_cookie =
haixiu_cookie =
vvxqiu_cookie =
17live_cookie =
langlive_cookie =
pplive_cookie =
6room_cookie =
lehaitv_cookie =
huamao_cookie =
shopee_cookie =
youtube_cookie =
taobao_cookie =
jd_cookie =
faceit_cookie =
migu_cookie =
lianjie_cookie =
laixiu_cookie =
picarto_cookie =
[Authorization]
popkontv_token =
popkontv_token =
[账号密码]
afreecatv账号 =
afreecatv密码 =
flextv账号 =
flextv密码 =
popkontv账号 =
sooplive账号 =
sooplive密码 =
flextv账号 =
flextv密码 =
popkontv账号 =
partner_code = P-00001
popkontv密码 =
popkontv密码 =
twitcasting账号类型 = normal
twitcasting账号 =
twitcasting密码 =
twitcasting账号 =
twitcasting密码 =

62
demo.py
View File

@ -1,8 +1,10 @@
# -*- coding: utf-8 -*-
from douyinliverecorder.logger import logger
from douyinliverecorder import spider
import asyncio
from src.logger import logger
from src import spider
# 以下示例直播间链接不保证时效性,请自行查看链接是否能正常访问
# Please note that the following example live room links may not be up-to-date
LIVE_STREAM_CONFIG = {
"douyin": {
"url": "https://live.douyin.com/745964462470",
@ -33,7 +35,7 @@ LIVE_STREAM_CONFIG = {
"func": spider.get_bilibili_stream_data,
},
"xhs": {
"url": "http://xhslink.com/O9f9fM",
"url": "https://www.xiaohongshu.com/user/profile/6330049c000000002303c7ed?appuid=5f3f478a00000000010005b3",
"func": spider.get_xhs_stream_url,
},
"bigo": {
@ -44,13 +46,9 @@ LIVE_STREAM_CONFIG = {
"url": "https://app.blued.cn/live?id=Mp6G2R",
"func": spider.get_blued_stream_url,
},
"afreecatv": {
"url": "https://play.afreecatv.com/sw7love",
"func": spider.get_afreecatv_stream_data,
},
"soop": {
"sooplive": {
"url": "https://play.sooplive.co.kr/sw7love",
"func": spider.get_afreecatv_stream_data,
"func": spider.get_sooplive_stream_data,
},
"netease": {
"url": "https://cc.163.com/583946984",
@ -73,7 +71,7 @@ LIVE_STREAM_CONFIG = {
"func": spider.get_winktv_stream_data,
},
"flextv": {
"url": "https://www.flextv.co.kr/channels/593127/live",
"url": "https://www.ttinglive.com/channels/685479/live",
"func": spider.get_flextv_stream_data,
},
"looklive": {
@ -145,8 +143,8 @@ LIVE_STREAM_CONFIG = {
"func": spider.get_haixiu_stream_url,
},
"vvxqiu": {
"url": "https://h5webcdn-pro.vvxqiu.com//activity/videoShare/videoShare.html?h5Server=https://h5p.vvxqiu.com"
"&roomId=LP115924473&platformId=vvstar",
"url": "https://h5webcdnp.vvxqiu.com//activity/videoShare/videoShare.html?h5Server=https://h5p.vvxqiu.com&"
"roomId=LP115664695&platformId=vvstar",
"func": spider.get_vvxqiu_stream_url,
},
"17live": {
@ -172,15 +170,51 @@ LIVE_STREAM_CONFIG = {
"huamao": {
"url": "https://h.catshow168.com/live/preview.html?uid=19066357&anchorUid=18895331",
"func": spider.get_pplive_stream_url,
},
"shopee": {
"url": "https://sg.shp.ee/GmpXeuf?uid=1006401066&session=802458",
"func": spider.get_shopee_stream_url,
},
"youtube": {
"url": "https://www.youtube.com/watch?v=cS6zS5hi1w0",
"func": spider.get_youtube_stream_url,
},
"taobao": {
"url": "https://m.tb.cn/h.TWp0HTd",
"func": spider.get_taobao_stream_url,
},
"jd": {
"url": "https://3.cn/28MLBy-E",
"func": spider.get_jd_stream_url,
},
"faceit": {
"url": "https://www.faceit.com/zh/players/Compl1/stream",
"func": spider.get_faceit_stream_data,
},
"lianjie": {
"url": "https://show.lailianjie.com/10000258",
"func": spider.get_lianjie_stream_url,
},
"migu": {
"url": "https://www.miguvideo.com/p/live/120000541321",
"func": spider.get_migu_stream_url,
},
"laixiu": {
"url": "https://www.imkktv.com/h5/share/video.html?uid=1845195&roomId=1710496",
"func": spider.get_laixiu_stream_url,
},
"picarto": {
"url": "https://www.picarto.tv/cuteavalanche",
"func": spider.get_picarto_stream_url,
}
}
def test_live_stream(platform_name: str, proxy_addr=None) -> None:
def test_live_stream(platform_name: str, proxy_addr=None, cookies=None) -> None:
if platform_name in LIVE_STREAM_CONFIG:
config = LIVE_STREAM_CONFIG[platform_name]
try:
stream_data = config['func'](config['url'], proxy_addr=proxy_addr)
stream_data = asyncio.run(config['func'](config['url'], proxy_addr=proxy_addr, cookies=cookies))
logger.debug(f"Stream data for {platform_name}: {stream_data}")
except Exception as e:
logger.error(f"Error fetching stream data for {platform_name}: {e}")

View File

@ -1,126 +0,0 @@
# -*- encoding: utf-8 -*-
"""
Author: Hmily
GitHub:https://github.com/ihmily
Date: 2023-07-17 23:52:05
Update: 2024-10-08 23:35:00
Copyright (c) 2023 by Hmily, All Rights Reserved.
"""
import json
import re
import urllib.parse
import execjs
import requests
import urllib.request
from . import JS_SCRIPT_PATH
no_proxy_handler = urllib.request.ProxyHandler({})
opener = urllib.request.build_opener(no_proxy_handler)
HEADERS = {
'User-Agent': 'Mozilla/5.0 (Linux; Android 11; SAMSUNG SM-G973U) AppleWebKit/537.36 (KHTML, like Gecko) '
'SamsungBrowser/14.2 Chrome/87.0.4280.141 Mobile Safari/537.36',
'Accept-Language': 'zh-CN,zh;q=0.8,zh-TW;q=0.7,zh-HK;q=0.5,en-US;q=0.3,en;q=0.2',
'Cookie': 's_v_web_id=verify_lk07kv74_QZYCUApD_xhiB_405x_Ax51_GYO9bUIyZQVf'
}
HEADERS_PC = {
'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/116.0.5845.97 '
'Safari/537.36 Core/1.116.438.400 QQBrowser/13.0.6070.400',
'Cookie': 'sessionid=7494ae59ae06784454373ce25761e864; __ac_nonce=0670497840077ee4c9eb2; '
'__ac_signature=_02B4Z6wo00f012DZczQAAIDCJJBb3EjnINdg-XeAAL8-db; '
's_v_web_id=verify_m1ztgtjj_vuHnMLZD_iwZ9_4YO4_BdN1_7wLP3pyqXsf2; ',
}
# X-Bogus signing algorithm (delegated to the bundled JS implementation).
def get_xbogus(url: str, headers: dict | None = None) -> str:
    """Compute the X-Bogus signature for *url*'s query string.

    Runs x-bogus.js through execjs; the signature depends on the query
    string and the User-Agent header.
    NOTE(review): the guard below checks lowercased header keys, but the
    lookup uses the exact key "User-Agent" — a caller passing "user-agent"
    skips the HEADERS fallback yet signs with the literal default string;
    confirm this is intended.
    """
    if not headers or 'user-agent' not in (k.lower() for k in headers):
        headers = HEADERS
    query = urllib.parse.urlparse(url).query
    xbogus = execjs.compile(open(f'{JS_SCRIPT_PATH}/x-bogus.js').read()).call('sign', query, headers.get("User-Agent", "user-agent"))
    return xbogus
# Resolve a share link to the live room id and the streamer's sec_user_id.
def get_sec_user_id(url: str, proxy_addr: str | None = None, headers: dict | None = None) -> tuple | None:
    """Follow the share link's redirect and parse room/user ids from it.

    :return: (room_id, sec_user_id) when the redirect is a 'reflow/' URL,
        otherwise None (implicit).
    NOTE(review): the no-proxy branch goes through `opener` (a proxy-free
    urllib opener) and therefore does NOT send *headers*; only the proxy
    branch sends them — confirm this asymmetry is intentional.
    """
    if not headers or all(k.lower() not in ['user-agent', 'cookie'] for k in headers):
        headers = HEADERS
    if proxy_addr:
        proxies = {
            'http': proxy_addr,
            'https': proxy_addr
        }
        response = requests.get(url, headers=headers, proxies=proxies, timeout=15)
    else:
        response = opener.open(url, timeout=15)
    redirect_url = response.url
    if 'reflow/' in redirect_url:
        sec_user_id = re.search(r'sec_user_id=([\w_\-]+)&', redirect_url).group(1)
        # room_id is the final path segment before the query string.
        room_id = redirect_url.split('?')[0].rsplit('/', maxsplit=1)[1]
        return room_id, sec_user_id
# Resolve a share link to the streamer's public Douyin handle (unique_id).
def get_unique_id(url: str, proxy_addr: str | None = None, headers: dict | None = None) -> str:
    """Follow the share link's redirect, then scrape the profile page for uniqueId.

    :raises IndexError: if the uniqueId pattern is not found in the profile page.
    NOTE(review): the second requests.get (profile page) does not pass
    *proxies* even when proxy_addr is set — confirm whether it should.
    """
    if not headers or all(k.lower() not in ['user-agent', 'cookie'] for k in headers):
        headers = HEADERS_PC
    if proxy_addr:
        proxies = {
            'http': proxy_addr,
            'https': proxy_addr
        }
        response = requests.get(url, headers=headers, proxies=proxies, timeout=15)
    else:
        response = opener.open(url, timeout=15)
    redirect_url = response.url
    # sec_user_id is the final path segment of the redirected profile URL.
    sec_user_id = redirect_url.split('?')[0].rsplit('/', maxsplit=1)[1]
    resp = requests.get(f'https://www.douyin.com/user/{sec_user_id}', headers=headers)
    # The handle is embedded in escaped JSON inside the page's HTML.
    unique_id = re.findall(r'undefined\\"},\\"uniqueId\\":\\"(.*?)\\",\\"customVerify', resp.text)[-1]
    return unique_id
# Resolve the internal room id to the public web room id (web_rid).
def get_live_room_id(room_id: str, sec_user_id: str, proxy_addr: str | None = None,
                     params: dict | None = None, headers: dict | None = None) -> str:
    """Query the webcast reflow API (with an X-Bogus signature) for web_rid.

    :param room_id: internal room id from get_sec_user_id
    :param sec_user_id: streamer's sec id from get_sec_user_id
    :raises KeyError: if the API response lacks data.room.owner.web_rid
    """
    if not headers or all(k.lower() not in ['user-agent', 'cookie'] for k in headers):
        headers = HEADERS
    if not params:
        params = {
            "verifyFp": "verify_lk07kv74_QZYCUApD_xhiB_405x_Ax51_GYO9bUIyZQVf",
            "type_id": "0",
            "live_id": "1",
            "room_id": room_id,
            "sec_user_id": sec_user_id,
            "app_id": "1128",
            "msToken": "wrqzbEaTlsxt52-vxyZo_mIoL0RjNi1ZdDe7gzEGMUTVh_HvmbLLkQrA_1HKVOa2C6gkxb6IiY6TY2z8enAkPEwGq--gM"
                       "-me3Yudck2ailla5Q4osnYIHxd9dI4WtQ==",
        }
    api = f'https://webcast.amemv.com/webcast/room/reflow/info/?{urllib.parse.urlencode(params)}'
    # The endpoint rejects unsigned requests; append the X-Bogus signature.
    xbogus = get_xbogus(api)
    api = api + "&X-Bogus=" + xbogus
    if proxy_addr:
        proxies = {
            'http': proxy_addr,
            'https': proxy_addr
        }
        response = requests.get(api, headers=headers, proxies=proxies, timeout=15)
        json_str = response.text
    else:
        req = urllib.request.Request(api, headers=headers)
        response = opener.open(req, timeout=15)
        json_str = response.read().decode('utf-8')
    json_data = json.loads(json_str)
    return json_data['data']['room']['owner']['web_rid']
if __name__ == '__main__':
    # Manual smoke test: resolve a Douyin share link to its public web_rid.
    room_url = "https://v.douyin.com/iQLgKSj/"
    _room_id, sec_uid = get_sec_user_id(room_url)
    web_rid = get_live_room_id(_room_id, sec_uid)
    print("return web_rid:", web_rid)

View File

@ -1,123 +0,0 @@
# -*- coding: utf-8 -*-
import os
import functools
import hashlib
import re
import traceback
from typing import Any
from collections import OrderedDict
import execjs
from .logger import logger
import configparser
def trace_error_decorator(func: callable) -> callable:
    """Wrap *func* so failures are logged and normalized to an empty list.

    Used on fetcher functions whose callers expect a list-like result: any
    exception is logged and ``[]`` is returned instead of propagating.
    """
    @functools.wraps(func)
    def wrapper(*args: list, **kwargs: dict) -> Any:
        try:
            return func(*args, **kwargs)
        except execjs.ProgramError:
            logger.warning('Failed to execute JS code. Please check if the Node.js environment')
        except Exception as e:
            # Report the innermost frame so the log points at the failing line.
            error_line = traceback.extract_tb(e.__traceback__)[-1].lineno
            error_info = f"错误信息: type: {type(e).__name__}, {str(e)} in function {func.__name__} at line: {error_line}"
            logger.error(error_info)
        # Fix: the ProgramError branch previously fell through and returned
        # None while other errors returned []; always return [] on failure.
        return []
    return wrapper
def check_md5(file_path: str) -> str:
    """Return the hex MD5 digest of the file at *file_path*."""
    with open(file_path, 'rb') as stream:
        return hashlib.md5(stream.read()).hexdigest()
def dict_to_cookie_str(cookies_dict: dict) -> str:
    """Serialize a cookie mapping into a "k=v; k2=v2" Cookie header string."""
    pairs = (f"{name}={value}" for name, value in cookies_dict.items())
    return '; '.join(pairs)
def read_config_value(file_path: str, section: str, key: str) -> str | None:
config = configparser.ConfigParser()
try:
config.read(file_path, encoding='utf-8-sig')
except Exception as e:
print(f"读取配置文件时出错: {e}")
return None
if section in config:
if key in config[section]:
return config[section][key]
else:
print(f"键[{key}]不存在于部分[{section}]中。")
else:
print(f"部分[{section}]不存在于文件中。")
return None
def update_config(file_path: str, section: str, key: str, new_value: str) -> None:
    """Set *key* under *section* in an INI file and write it back in place.

    Prints a diagnostic and returns early when the file cannot be read or
    the section does not exist.
    """
    parser = configparser.ConfigParser()
    try:
        parser.read(file_path, encoding='utf-8-sig')
    except Exception as e:
        print(f"读取配置文件时出错: {e}")
        return
    if section not in parser:
        print(f"部分[{section}]不存在于文件中。")
        return
    # Escape literal % so ConfigParser interpolation does not choke on it.
    parser[section][key] = new_value.replace('%', '%%')
    try:
        with open(file_path, 'w', encoding='utf-8-sig') as configfile:
            parser.write(configfile)
        print(f"配置文件中[{section}]下的{key}的值已更新")
    except Exception as e:
        print(f"写入配置文件时出错: {e}")
def get_file_paths(directory: str) -> list:
    """Recursively collect the full paths of all files under *directory*."""
    return [
        os.path.join(dirpath, filename)
        for dirpath, _dirnames, filenames in os.walk(directory)
        for filename in filenames
    ]
def remove_emojis(text: str, replace_text=r''):
    """Replace runs of common emoji/pictograph code points in *text*.

    :param replace_text: substitution for each matched run (default: remove).
    """
    ranges = (
        "\U0001F1E0-\U0001F1FF"  # flags (iOS)
        "\U0001F300-\U0001F5FF"  # symbols & pictographs
        "\U0001F600-\U0001F64F"  # emoticons
        "\U0001F680-\U0001F6FF"  # transport & map symbols
        "\U0001F700-\U0001F77F"  # alchemical symbols
        "\U0001F780-\U0001F7FF"  # Geometric Shapes Extended
        "\U0001F800-\U0001F8FF"  # Supplemental Arrows-C
        "\U0001F900-\U0001F9FF"  # Supplemental Symbols and Pictographs
        "\U0001FA00-\U0001FA6F"  # Chess Symbols
        "\U0001FA70-\U0001FAFF"  # Symbols and Pictographs Extended-A
        "\U00002702-\U000027B0"  # Dingbats
    )
    pattern = re.compile(f"[{ranges}]+", flags=re.UNICODE)
    return pattern.sub(replace_text, text)
def remove_duplicate_lines(file_path):
    """Rewrite *file_path* keeping only the first occurrence of each stripped line."""
    encoding = 'utf-8-sig'
    with open(file_path, 'r', encoding=encoding) as src:
        # dict preserves insertion order, so this dedupes while keeping order.
        unique = list(dict.fromkeys(line.strip() for line in src))
    with open(file_path, 'w', encoding=encoding) as dst:
        dst.writelines(entry + '\n' for entry in unique)

Binary file not shown.

221
ffmpeg_install.py Normal file
View File

@ -0,0 +1,221 @@
# -*- coding: utf-8 -*-
"""
Author: Hmily
GitHub: https://github.com/ihmily
Copyright (c) 2024 by Hmily, All Rights Reserved.
"""
import os
import re
import subprocess
import sys
import platform
import zipfile
from pathlib import Path
import requests
from tqdm import tqdm
from src.logger import logger
current_platform = platform.system()
execute_dir = os.path.split(os.path.realpath(sys.argv[0]))[0]
current_env_path = os.environ.get('PATH')
ffmpeg_path = os.path.join(execute_dir, 'ffmpeg')
def unzip_file(zip_path: str | Path, extract_to: str | Path, delete: bool = True) -> None:
if not os.path.exists(extract_to):
os.makedirs(extract_to)
with zipfile.ZipFile(zip_path, 'r') as zip_ref:
zip_ref.extractall(extract_to)
if delete and os.path.exists(zip_path):
os.remove(zip_path)
def get_lanzou_download_link(url: str, password: str | None = None) -> str | None:
    """Resolve a Lanzou cloud share URL (optionally password-protected) to a direct link.

    Scrapes the share page for the one-time 'skdklds' sign token, posts it
    to ajaxm.php, then follows the returned link. Returns the final
    redirected URL, or None (implicit) on any failure.
    """
    try:
        headers = {
            'accept-language': 'zh-CN,zh;q=0.9,en;q=0.8,en-GB;q=0.7,en-US;q=0.6',
            'Origin': 'https://wweb.lanzouv.com',
            'Referer': 'https://wweb.lanzouv.com/iXncv0dly6mh',
            'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) '
                          'Chrome/121.0.0.0 Safari/537.36 Edg/121.0.0.0',
        }
        response = requests.get(url, headers=headers)
        # The page embeds a one-time sign token required by the download API.
        sign = re.search("var skdklds = '(.*?)';", response.text).group(1)
        data = {
            'action': 'downprocess',
            'sign': sign,
            'p': password,
            'kd': '1',
        }
        response = requests.post('https://wweb.lanzouv.com/ajaxm.php', headers=headers, data=data)
        json_data = response.json()
        download_url = json_data['dom'] + "/file/" + json_data['url']
        # Follow the intermediate link; response.url is the real file URL.
        response = requests.get(download_url, headers=headers)
        return response.url
    except Exception as e:
        logger.error(f"Failed to obtain ffmpeg download address. {e}")
def install_ffmpeg_windows():
    """Download a prebuilt ffmpeg zip (via Lanzou cloud) and unpack it next to the program.

    Returns True when the install branch ran, None on other paths (falsy).
    """
    try:
        logger.warning("ffmpeg is not installed.")
        logger.debug("Installing the latest version of ffmpeg for Windows...")
        ffmpeg_url = get_lanzou_download_link('https://wweb.lanzouv.com/iHAc22ly3r3g', 'eots')
        if ffmpeg_url:
            full_file_name = 'ffmpeg_latest_build_20250124.zip'
            version = 'v20250124'
            zip_file_path = Path(execute_dir) / full_file_name
            if Path(zip_file_path).exists():
                # Reuse a previously downloaded archive.
                logger.debug("ffmpeg installation file already exists, start install...")
            else:
                # Stream the archive to disk with a progress bar.
                response = requests.get(ffmpeg_url, stream=True)
                total_size = int(response.headers.get('Content-Length', 0))
                block_size = 1024
                with tqdm(total=total_size, unit="B", unit_scale=True,
                          ncols=100, desc=f'Downloading ffmpeg ({version})') as t:
                    with open(zip_file_path, 'wb') as f:
                        for data in response.iter_content(block_size):
                            t.update(len(data))
                            f.write(data)
            unzip_file(zip_file_path, execute_dir)
            # Make the freshly extracted binaries visible to this process.
            os.environ['PATH'] = ffmpeg_path + os.pathsep + current_env_path
            result = subprocess.run(["ffmpeg", "-version"], capture_output=True)
            if result.returncode == 0:
                logger.debug('ffmpeg installation was successful')
            else:
                logger.error('ffmpeg installation failed. Please manually install ffmpeg by yourself')
            # NOTE(review): True is returned even when the version check above
            # failed — confirm whether the failure path should return False.
            return True
        else:
            logger.error("Please manually install ffmpeg by yourself")
    except Exception as e:
        logger.error(f"type: {type(e).__name__}, ffmpeg installation failed {e}")
def install_ffmpeg_mac():
    """Install ffmpeg on macOS through Homebrew.

    Returns True on success; failure paths log and return None (falsy).
    """
    logger.warning("ffmpeg is not installed.")
    logger.debug("Installing the stable version of ffmpeg for macOS...")
    try:
        result = subprocess.run(["brew", "install", "ffmpeg"], capture_output=True)
        if result.returncode == 0:
            logger.debug('ffmpeg installation was successful. Restart for changes to take effect.')
            return True
        else:
            logger.error("ffmpeg installation failed")
    except subprocess.CalledProcessError as e:
        logger.error(f"Failed to install ffmpeg using Homebrew. {e}")
        logger.error("Please install ffmpeg manually or check your Homebrew installation.")
    except Exception as e:
        logger.error(f"An unexpected error occurred: {e}")
def install_ffmpeg_linux():
    """Install ffmpeg on Linux, trying yum first and falling back to apt.

    Returns True on success, False otherwise.
    """
    # Assume a RHEL-family system until yum turns out to be unavailable.
    is_RHS = True
    try:
        logger.warning("ffmpeg is not installed.")
        logger.debug("Trying to install the stable version of ffmpeg")
        result = subprocess.run(['yum', '-y', 'update'], capture_output=True)
        if result.returncode != 0:
            logger.error("Failed to update package lists using yum.")
            return False
        result = subprocess.run(['yum', 'install', '-y', 'ffmpeg'], capture_output=True)
        if result.returncode == 0:
            logger.debug("ffmpeg installation was successful using yum. Restart for changes to take effect.")
            return True
        logger.error(result.stderr.decode('utf-8').strip())
    except FileNotFoundError:
        # No yum binary: not a RHEL-family system, try apt below.
        logger.debug("yum command not found, trying to install using apt...")
        is_RHS = False
    except Exception as e:
        logger.error(f"An error occurred while trying to install ffmpeg using yum: {e}")
    if not is_RHS:
        try:
            logger.debug("Trying to install the stable version of ffmpeg for Linux using apt...")
            result = subprocess.run(['apt', 'update'], capture_output=True)
            if result.returncode != 0:
                logger.error("Failed to update package lists using apt")
                return False
            result = subprocess.run(['apt', 'install', '-y', 'ffmpeg'], capture_output=True)
            if result.returncode == 0:
                logger.debug("ffmpeg installation was successful using apt. Restart for changes to take effect.")
                return True
            else:
                logger.error(result.stderr.decode('utf-8').strip())
        except FileNotFoundError:
            logger.error("apt command not found, unable to install ffmpeg. Please manually install ffmpeg by yourself")
        except Exception as e:
            logger.error(f"An error occurred while trying to install ffmpeg using apt: {e}")
    logger.error("Manual installation of ffmpeg is required. Please manually install ffmpeg by yourself.")
    return False
def install_ffmpeg() -> bool:
    """Dispatch to the platform-specific ffmpeg installer.

    Returns whatever the installer returns; False when the platform is
    unsupported.
    """
    installers = {
        "Windows": install_ffmpeg_windows,
        "Linux": install_ffmpeg_linux,
        "Darwin": install_ffmpeg_mac,
    }
    installer = installers.get(current_platform)
    if installer is None:
        logger.debug(f"ffmpeg auto installation is not supported on this platform: {current_platform}. "
                     f"Please install ffmpeg manually.")
        return False
    return installer()
def ensure_ffmpeg_installed(func):
    """Decorator: verify ffmpeg is on PATH before calling *func*.

    If ffmpeg is missing, attempt an automatic install; raise RuntimeError
    when it is still unavailable afterwards.

    Fixes over the previous version:
    - the ``sys.version_info >= (3, 7)`` branch was dead code (both arms
      were identical) and is removed;
    - the availability probe used to *call the wrapped function* as a side
      effect, so a successful call ran *func* twice, and a falsy return
      value from *func* triggered a spurious reinstall. The probe is now
      separate and *func* runs exactly once.
    """
    def _ffmpeg_ok() -> bool:
        # Probe the binary; FileNotFoundError means it is not on PATH.
        try:
            result = subprocess.run(['ffmpeg', '-version'], capture_output=True)
            return result.returncode == 0 and bool(result.stdout.strip())
        except FileNotFoundError:
            return False

    def wrapped_func(*args, **kwargs):
        if not _ffmpeg_ok():
            install_ffmpeg()
            if not _ffmpeg_ok():
                raise RuntimeError("ffmpeg is not installed.")
        return func(*args, **kwargs)
    return wrapped_func
def check_ffmpeg_installed() -> bool:
    """Return True when a working ffmpeg binary is reachable on PATH."""
    try:
        probe = subprocess.run(['ffmpeg', '-version'], capture_output=True)
    except FileNotFoundError:
        return False
    except OSError as e:
        print(f"OSError occurred: {e}. ffmpeg may not be installed correctly or is not available in the system PATH.")
        print("Please delete the ffmpeg and try to download and install again.")
        return False
    except Exception as e:
        print(f"An unexpected error occurred: {e}")
        return False
    # Installed only if the probe succeeded AND produced version output.
    return probe.returncode == 0 and bool(probe.stdout.strip())
def check_ffmpeg() -> bool:
    """Ensure ffmpeg is available, triggering an install when the check fails."""
    return True if check_ffmpeg_installed() else install_ffmpeg()

32
i18n.py Normal file
View File

@ -0,0 +1,32 @@
import os
import sys
import gettext
import inspect
import builtins
from pathlib import Path
def init_gettext(locale_dir, locale_name):
    """Bind the gettext catalog under *locale_dir* and return the translator.

    NOTE(review): the text domain is hard-coded to 'zh_CN' (conventionally
    the domain is the application name and the locale is selected via LANG);
    this matches the bundled i18n/.../zh_CN catalog layout — confirm before
    changing.
    """
    gettext.bindtextdomain('zh_CN', locale_dir)
    gettext.textdomain('zh_CN')
    # Steer gettext's locale selection toward the requested locale.
    os.environ['LANG'] = f'{locale_name}.utf8'
    return gettext.gettext
# Locate the bundled translation catalogs: packaged one-file builds unpack
# them under _internal/i18n, source checkouts keep them in ./i18n.
execute_dir = os.path.split(os.path.realpath(sys.argv[0]))[0]
if os.path.exists(Path(execute_dir) / '_internal/i18n'):
    locale_path = Path(execute_dir) / '_internal/i18n'
else:
    locale_path = Path(execute_dir) / 'i18n'
_tr = init_gettext(locale_path, 'zh_CN')
# Keep a handle on the real print; translated_print wraps it.
original_print = builtins.print
# Only messages printed from modules under this package get translated.
package_name = 'src'
def translated_print(*args, **kwargs):
    """Drop-in replacement for print() that translates package-originated messages.

    Arguments coming from a caller whose filename contains *package_name*
    are run through the gettext translator; everything else is printed
    verbatim.

    Fixes over the previous version:
    - the caller frame is inspected once per call instead of once per
      argument (inspect.stack() is expensive);
    - all arguments go through a single print call, preserving normal
      print() semantics (one line, `sep` honored) instead of emitting one
      line per argument.
    """
    from_package = package_name in inspect.stack()[1].filename
    rendered = [_tr(str(arg)) if from_package else str(arg) for arg in args]
    original_print(*rendered, **kwargs)

View File

Binary file not shown.

View File

@ -0,0 +1,85 @@
# DouyinLiveRecorder.
# Copyright (C) 2024 Hmily
# This file is distributed under the same license as the DouyinLiveRecorder package.
#
#, fuzzy
msgid ""
msgstr ""
"Project-Id-Version: 4.0.1\n"
"POT-Creation-Date: 2024-10-20 00:00+0800\n"
"PO-Revision-Date: 2024-11-09 03:05+0800\n"
"Last-Translator: Hmily <EMAIL@ADDRESS>\n"
"Language-Team: Chinese\n"
"Content-Type: text/plain; charset=UTF-8\n"
"Content-Transfer-Encoding: 8bit\n"
"Language: zh_CN\n"
"Plural-Forms: nplurals=1; plural=0;\n"
#: douyinliverecorder/spider.py
msgid "IP banned. Please change device or network."
msgstr "IP被禁止 请更换设备或网络"
msgid "The anchor did not start broadcasting."
msgstr "主播并未开播"
msgid "sooplive platform login successful! Starting to fetch live streaming data..."
msgstr "sooplive平台登录成功开始获取直播数据..."
msgid "sooplive live stream failed to retrieve, the live stream just ended."
msgstr "sooplive直播获取失败,该直播间刚结束直播"
msgid "sooplive live stream retrieval failed, the live needs 19+, you are not logged in."
msgstr "soop直播获取失败该直播间需要年龄19+观看,您尚未登录"
msgid "Attempting to log in to the sooplive live streaming platform with your account and password, please ensure it is configured."
msgstr "正在尝试使用您的账号和密码登录soop直播平台请确保已在config配置文件中配置"
msgid "error messagePlease check if the input sooplive live room address is correct."
msgstr "错误信息请检查输入的sooplive直播间地址是否正确"
msgid "Please check if the FlexTV account and password in the configuration file are correct."
msgstr "请检查配置文件中的FlexTV账号和密码是否正确"
msgid "FlexTV live stream retrieval failed [not logged in]: 19+ live streams are only available for logged-in adults."
msgstr "FlexTV直播获取失败[未登录]: 19+直播需要登录后是成人才可观看"
msgid "Attempting to log in to the FlexTV live streaming platform, please ensure your account and password are correctly filled in the configuration file."
msgstr "正在尝试登录FlexTV直播平台请确保已在配置文件中填写好您的账号和密码"
msgid "Logging into FlexTV platform..."
msgstr "FlexTV平台登录中..."
msgid "Logged into FlexTV platform successfully! Starting to fetch live streaming data..."
msgstr "FlexTV平台登录成功开始获取直播数据..."
msgid "Look live currently only supports audio live streaming, not video live streaming!"
msgstr "Look直播暂时只支持音频直播不支持Look视频直播!"
msgid "Failed to retrieve popkontv live stream [token does not exist or has expired]: Please log in to watch."
msgstr "popkontv直播获取失败[token不存在或者已过期]: 请登录后观看"
msgid "Attempting to log in to the popkontv live streaming platform, please ensure your account and password are correctly filled in the configuration file."
msgstr "正在尝试登录popkontv直播平台请确保已在配置文件中填写好您的账号和密码"
msgid "Logging into popkontv platform..."
msgstr "popkontv平台登录中..."
msgid "Logged into popkontv platform successfully! Starting to fetch live streaming data..."
msgstr "popkontv平台登录成功开始获取直播数据..."
msgid "Attempting to log in to TwitCasting..."
msgstr "TwitCasting正在尝试登录..."
msgid "TwitCasting login successful! Starting to fetch data..."
msgstr "TwitCasting 登录成功!开始获取数据..."
msgid "Failed to retrieve TwitCasting data, attempting to log in..."
msgstr "获取TwitCasting数据失败正在尝试登录..."
msgid "Failed to retrieve live room data, the Huajiao live room address is not fixed, please manually change the address for recording."
msgstr "获取直播间数据失败,花椒直播间地址是非固定的,请手动更换地址进行录制"
msgid "Fetch shopee live data failed, please update the address of the live broadcast room and try again."
msgstr "获取shopee直播间数据失败请手动更换直播录制地址后重试"

1173
main.py

File diff suppressed because it is too large Load Diff

View File

@ -4,7 +4,7 @@
Author: Hmily
GitHub: https://github.com/ihmily
Date: 2023-09-03 19:18:36
Update: 2024-10-23 23:37:12
Update: 2025-01-23 17:16:12
Copyright (c) 2023-2024 by Hmily, All Rights Reserved.
"""
from typing import Dict, Any
@ -22,7 +22,7 @@ opener = urllib.request.build_opener(no_proxy_handler)
headers: Dict[str, str] = {'Content-Type': 'application/json'}
def dingtalk(url: str, content: str, number: str = None) -> Dict[str, Any]:
def dingtalk(url: str, content: str, number: str = None, is_atall: bool = False) -> Dict[str, Any]:
success = []
error = []
api_list = url.replace('', ',').split(',') if url.strip() else []
@ -36,6 +36,7 @@ def dingtalk(url: str, content: str, number: str = None) -> Dict[str, Any]:
"atMobiles": [
number
],
"isAtAll": is_atall
},
}
try:
@ -66,7 +67,7 @@ def xizhi(url: str, title: str, content: str) -> Dict[str, Any]:
}
try:
data = json.dumps(json_data).encode('utf-8')
req = urllib.request.Request(url, data=data, headers=headers)
req = urllib.request.Request(api, data=data, headers=headers)
response = opener.open(req, timeout=10)
json_str = response.read().decode('utf-8')
json_data = json.loads(json_str)
@ -82,7 +83,7 @@ def xizhi(url: str, title: str, content: str) -> Dict[str, Any]:
def send_email(email_host: str, login_email: str, email_pass: str, sender_email: str, sender_name: str,
to_email: str, title: str, content: str) -> Dict[str, Any]:
to_email: str, title: str, content: str, smtp_port: str = None, open_ssl: bool = True) -> Dict[str, Any]:
receivers = to_email.replace('', ',').split(',') if to_email.strip() else []
try:
@ -96,7 +97,12 @@ def send_email(email_host: str, login_email: str, email_pass: str, sender_email:
t_apart = MIMEText(content, 'plain', 'utf-8')
message.attach(t_apart)
smtp_obj = smtplib.SMTP_SSL(email_host, 465)
if open_ssl:
smtp_port = int(smtp_port) or 465
smtp_obj = smtplib.SMTP_SSL(email_host, smtp_port)
else:
smtp_port = int(smtp_port) or 25
smtp_obj = smtplib.SMTP(email_host, smtp_port)
smtp_obj.login(login_email, email_pass)
smtp_obj.sendmail(sender_email, receivers, message.as_string())
return {"success": receivers, "error": []}
@ -207,6 +213,42 @@ def ntfy(api: str, title: str = "message", content: str = 'test', tags: str = 't
return {"success": success, "error": error}
def pushplus(token: str, title: str, content: str) -> Dict[str, Any]:
    """
    Send a PushPlus push notification to one or more tokens.
    API docs: https://www.pushplus.plus/doc/

    :param token: one token, or several separated by commas
    :return: {"success": [tokens that succeeded], "error": [tokens that failed]}
    """
    success = []
    error = []
    # NOTE(review): replace('', ',') inserts a comma between every character;
    # presumably this should normalize a full-width comma ('，') to ',' before
    # splitting multiple tokens (the character may have been lost in
    # transcription) — verify against version control.
    token_list = token.replace('', ',').split(',') if token.strip() else []
    for _token in token_list:
        json_data = {
            'token': _token,
            'title': title,
            'content': content
        }
        try:
            url = 'https://www.pushplus.plus/send'
            data = json.dumps(json_data).encode('utf-8')
            req = urllib.request.Request(url, data=data, headers=headers)
            response = opener.open(req, timeout=10)
            json_str = response.read().decode('utf-8')
            json_data = json.loads(json_str)
            # code 200 marks acceptance by the PushPlus API.
            if json_data.get('code') == 200:
                success.append(_token)
            else:
                error.append(_token)
                print(f'PushPlus推送失败, Token{_token}, 失败信息:{json_data.get("msg", "未知错误")}')
        except Exception as e:
            error.append(_token)
            print(f'PushPlus推送失败, Token{_token}, 错误信息:{e}')
    return {"success": success, "error": error}
if __name__ == '__main__':
send_title = '直播通知' # 标题
send_content = '张三 开播了!' # 推送内容
@ -214,6 +256,7 @@ if __name__ == '__main__':
# 钉钉推送通知
webhook_api = '' # 替换成自己Webhook链接,参考文档https://open.dingtalk.com/document/robots/custom-robot-access
phone_number = '' # 被@用户的手机号码
is_atall = '' # 是否@全体
# dingtalk(webhook_api, send_content, phone_number)
# 微信推送通知
@ -235,7 +278,7 @@ if __name__ == '__main__':
# sender_name="",
# to_email="",
# title="",
# content=""
# content="",
# )
bark_url = 'https://xxx.xxx.com/key/'
@ -246,3 +289,7 @@ if __name__ == '__main__':
title="直播推送",
content="xxx已开播",
)
# PushPlus推送通知
pushplus_token = '' # 替换成自己的PushPlus Token获取地址https://www.pushplus.plus/
# pushplus(pushplus_token, send_title, send_content)

View File

@ -1,4 +0,0 @@
[virtualenvs]
in-project = true
create = true
prefer-active-python = true

View File

@ -1,26 +1,23 @@
[project]
requires-python = ">=3.10"
[build-system]
requires = ["poetry-core>=1.0.0"]
build-backend = "poetry.core.masonry.api"
[tool.poetry]
name = "douyinliverecorder"
version = "3.0.9"
description = "An easy tool for recording live streams"
authors = ["Hmily"]
license = "MIT"
name = "DouyinLiveRecorder"
version = "4.0.7"
description = "可循环值守和多人录制的直播录制软件, 支持抖音、TikTok、Youtube、快手、虎牙、斗鱼、B站、小红书、pandatv、sooplive、flextv、popkontv、twitcasting、winktv、百度、微博、酷狗、17Live、Twitch、Acfun、CHZZK、shopee等40+平台直播录制"
readme = "README.md"
homepage = "https://github.com/ihmily/DouyinLiveRecorder"
repository = "https://github.com/ihmily/DouyinLiveRecorder"
keywords = ["douyin", "live", "recorder"]
authors = [{name = "Hmily"}]
license = { text = "MIT" }
requires-python = ">=3.10"
dependencies = [
"requests>=2.31.0",
"loguru>=0.7.3",
"pycryptodome>=3.20.0",
"distro>=1.9.0",
"tqdm>=4.67.1",
"httpx[http2]>=0.28.1",
"PyExecJS>=1.5.1"
]
[tool.poetry.dependencies]
python = "^3.10"
requests = "^2.25.1"
PyExecJS = "^1.5.1"
loguru = "^0.5.3"
pycryptodome = "^3.10.1"
distro = "^1.9.0"
tqdm = "^4.66.5"
[project.urls]
"Homepage" = "https://github.com/ihmily/DouyinLiveRecorder"
"Documentation" = "https://github.com/ihmily/DouyinLiveRecorder"
"Repository" = "https://github.com/ihmily/DouyinLiveRecorder"
"Issues" = "https://github.com/ihmily/DouyinLiveRecorder/issues"

View File

@ -1,6 +1,7 @@
requests
PyExecJS
loguru==0.7.2
pycryptodome==3.20.0
distro==1.9.0
tqdm==4.66.5
requests>=2.31.0
loguru>=0.7.3
pycryptodome>=3.20.0
distro>=1.9.0
tqdm>=4.67.1
httpx[http2]>=0.28.1
PyExecJS>=1.5.1

View File

@ -1,31 +0,0 @@
# -*- coding: utf-8 -*-
from setuptools import setup, find_packages
setup(
name='douyinliverecorder',
version='3.0.9',
author='Hmily',
description='An easy tool for recording live streams',
long_description=open('README.md', encoding='utf-8').read(),
long_description_content_type='text/markdown',
url='https://github.com/ihmily/DouyinLiveRecorder',
packages=find_packages(),
install_requires=[
'requests',
'PyExecJS',
'loguru',
'pycryptodome',
'distro',
'tqdm'
],
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3 :: Only',
'Programming Language :: Python :: 3.10',
'Programming Language :: Python :: 3.11',
'Programming Language :: Python :: 3.12',
'Programming Language :: Python :: 3.13',
]
)

454
src/ab_sign.py Normal file
View File

@ -0,0 +1,454 @@
# -*- encoding: utf-8 -*-
import math
import time
def rc4_encrypt(plaintext: str, key: str) -> str:
    """RC4 stream cipher: XOR *plaintext* with the keystream derived from *key*.

    Operates on code points via ord/chr, so it matches byte-oriented RC4 for
    inputs whose characters are < 256. RC4 is symmetric: applying it twice
    with the same key recovers the input.
    """
    # KSA: key-scheduling permutation of the 256-entry state array.
    state = list(range(256))
    swap = 0
    key_len = len(key)
    for idx in range(256):
        swap = (swap + state[idx] + ord(key[idx % key_len])) % 256
        state[idx], state[swap] = state[swap], state[idx]
    # PRGA: produce keystream values and XOR them with the input.
    out = []
    i = j = 0
    for ch in plaintext:
        i = (i + 1) % 256
        j = (j + state[i]) % 256
        state[i], state[j] = state[j], state[i]
        keystream = state[(state[i] + state[j]) % 256]
        out.append(chr(keystream ^ ord(ch)))
    return ''.join(out)
def left_rotate(x: int, n: int) -> int:
    """Rotate the 32-bit value *x* left by *n* bits (n is taken mod 32)."""
    shift = n % 32
    rotated = (x << shift) | (x >> (32 - shift))
    return rotated & 0xFFFFFFFF
def get_t_j(j: int) -> int:
    """SM3 round constant Tj: 0x79CC4519 for rounds 0-15, 0x7A879D8A for 16-63.

    :raises ValueError: when j is outside [0, 64)
    """
    if not 0 <= j < 64:
        raise ValueError("invalid j for constant Tj")
    return 0x79CC4519 if j < 16 else 0x7A879D8A
def ff_j(j: int, x: int, y: int, z: int) -> int:
    """SM3 boolean function FFj: parity for rounds 0-15, majority for 16-63.

    :raises ValueError: when j is outside [0, 64)
    """
    if not 0 <= j < 64:
        raise ValueError("invalid j for bool function FF")
    if j < 16:
        return (x ^ y ^ z) & 0xFFFFFFFF
    # Majority: a result bit is set when at least two input bits are set.
    return ((x & (y | z)) | (y & z)) & 0xFFFFFFFF
def gg_j(j: int, x: int, y: int, z: int) -> int:
    """SM3 boolean function GGj: parity for rounds 0-15, choose(x, y, z) for 16-63.

    :raises ValueError: when j is outside [0, 64)
    """
    if not 0 <= j < 64:
        raise ValueError("invalid j for bool function GG")
    if j < 16:
        return (x ^ y ^ z) & 0xFFFFFFFF
    # Choose: bits of y where x is 1, bits of z where x is 0.
    return ((x & y) | (~x & z)) & 0xFFFFFFFF
class SM3:
    """Incremental SM3 hash, ported to mirror a JS implementation.

    `write` absorbs data, `sum` finalizes and returns the digest as a hex
    string or a list of 32 byte values, then resets the internal state.
    """
    def __init__(self):
        # reg: eight 32-bit working registers (the chaining value).
        # chunk: buffered input bytes not yet compressed (< 64 at rest).
        # size: total bytes absorbed, used for the length padding.
        self.reg = []
        self.chunk = []
        self.size = 0
        self.reset()
    def reset(self):
        # Initialization vector — matches the values used by the JS version.
        self.reg = [
            1937774191, 1226093241, 388252375, 3666478592,
            2842636476, 372324522, 3817729613, 2969243214
        ]
        self.chunk = []
        self.size = 0
    def write(self, data):
        """Absorb *data*: a str is UTF-8 encoded, otherwise a byte sequence."""
        # Normalize the input to a list of byte values.
        if isinstance(data, str):
            # Convert directly to a list of UTF-8 bytes.
            a = list(data.encode('utf-8'))
        else:
            a = data
        self.size += len(a)
        f = 64 - len(self.chunk)
        if len(a) < f:
            # Data fits in the current 64-byte block: just buffer it.
            self.chunk.extend(a)
        else:
            # Otherwise compress complete 64-byte blocks as they fill up.
            self.chunk.extend(a[:f])
            while len(self.chunk) >= 64:
                self._compress(self.chunk)
                if f < len(a):
                    self.chunk = a[f:min(f + 64, len(a))]
                else:
                    self.chunk = []
                f += 64
    def _fill(self):
        """Append the 0x80/zero padding and the 64-bit message bit-length."""
        # Message length in bits.
        bit_length = 8 * self.size
        # Append the mandatory 0x80 marker byte.
        padding_pos = len(self.chunk)
        self.chunk.append(0x80)
        padding_pos = (padding_pos + 1) % 64
        # If fewer than 8 bytes remain, the length spills into the next block.
        if 64 - padding_pos < 8:
            padding_pos -= 64
        # Pad with zeros, leaving 8 bytes for the length field.
        while padding_pos < 56:
            self.chunk.append(0)
            padding_pos += 1
        # High 32 bits of the message length...
        high_bits = bit_length // 4294967296
        for i in range(4):
            self.chunk.append((high_bits >> (8 * (3 - i))) & 0xFF)
        # ...followed by the low 32 bits.
        for i in range(4):
            self.chunk.append((bit_length >> (8 * (3 - i))) & 0xFF)
    def _compress(self, data):
        """Run the SM3 compression function on one 64-byte block."""
        if len(data) < 64:
            raise ValueError("compress error: not enough data")
        else:
            # Message expansion buffer: W[0..67] then W'[0..63].
            w = [0] * 132
            # Pack the 64 input bytes into sixteen 32-bit words.
            for t in range(16):
                w[t] = (data[4 * t] << 24) | (data[4 * t + 1] << 16) | (data[4 * t + 2] << 8) | data[4 * t + 3]
                w[t] &= 0xFFFFFFFF
            # Message expansion.
            for j in range(16, 68):
                a = w[j - 16] ^ w[j - 9] ^ left_rotate(w[j - 3], 15)
                a = a ^ left_rotate(a, 15) ^ left_rotate(a, 23)
                w[j] = (a ^ left_rotate(w[j - 13], 7) ^ w[j - 6]) & 0xFFFFFFFF
            # Compute W'.
            for j in range(64):
                w[j + 68] = (w[j] ^ w[j + 4]) & 0xFFFFFFFF
            # 64 compression rounds over the working registers.
            a, b, c, d, e, f, g, h = self.reg
            for j in range(64):
                ss1 = left_rotate((left_rotate(a, 12) + e + left_rotate(get_t_j(j), j)) & 0xFFFFFFFF, 7)
                ss2 = ss1 ^ left_rotate(a, 12)
                tt1 = (ff_j(j, a, b, c) + d + ss2 + w[j + 68]) & 0xFFFFFFFF
                tt2 = (gg_j(j, e, f, g) + h + ss1 + w[j]) & 0xFFFFFFFF
                d = c
                c = left_rotate(b, 9)
                b = a
                a = tt1
                h = g
                g = left_rotate(f, 19)
                f = e
                e = (tt2 ^ left_rotate(tt2, 9) ^ left_rotate(tt2, 17)) & 0xFFFFFFFF
            # Fold the round output back into the chaining value (XOR).
            self.reg[0] ^= a
            self.reg[1] ^= b
            self.reg[2] ^= c
            self.reg[3] ^= d
            self.reg[4] ^= e
            self.reg[5] ^= f
            self.reg[6] ^= g
            self.reg[7] ^= h
    def sum(self, data=None, output_format=None):
        """
        Finalize and return the digest.

        :param data: optional input; when given, state is reset and *data*
            is absorbed before finalizing.
        :param output_format: 'hex' for a hex string, anything else for a
            list of 32 byte values.
        """
        # When input is supplied, start from a clean state.
        if data is not None:
            self.reset()
            self.write(data)
        self._fill()
        # Compress the remaining (padded) blocks.
        for f in range(0, len(self.chunk), 64):
            self._compress(self.chunk[f:f + 64])
        if output_format == 'hex':
            # Hexadecimal output.
            result = ''.join(f'{val:08x}' for val in self.reg)
        else:
            # Byte-list output (big-endian per register).
            result = []
            for f in range(8):
                c = self.reg[f]
                result.append((c >> 24) & 0xFF)
                result.append((c >> 16) & 0xFF)
                result.append((c >> 8) & 0xFF)
                result.append(c & 0xFF)
        self.reset()
        return result
def result_encrypt(long_str: str, num: str | None = None) -> str:
    """Base64-like encode *long_str* using one of several custom alphabets.

    :param num: alphabet name ("s0".."s4"); raises KeyError if unknown.
    """
    # Custom (shuffled) base64 alphabets keyed by name.
    encoding_tables = {
        "s0": "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/=",
        "s1": "Dkdpgh4ZKsQB80/Mfvw36XI1R25+WUAlEi7NLboqYTOPuzmFjJnryx9HVGcaStCe=",
        "s2": "Dkdpgh4ZKsQB80/Mfvw36XI1R25-WUAlEi7NLboqYTOPuzmFjJnryx9HVGcaStCe=",
        "s3": "ckdp1h4ZKsUB80/Mfvw36XIgR25+WQAlEi7NLboqYTOPuzmFjJnryx9HVGDaStCe",
        "s4": "Dkdpgh2ZmsQB80/MfvV36XI1R45-WUAlEixNLwoqYTOPuzKFjJnry79HbGcaStCe"
    }
    alphabet = encoding_tables[num]
    # Masks/shifts extracting four 6-bit groups out of each 24-bit word.
    masks = (16515072, 258048, 4032, 63)
    shifts = (18, 12, 6, 0)
    pieces = []
    round_num = 0
    long_int = get_long_int(round_num, long_str)
    total_chars = math.ceil(len(long_str) / 3 * 4)
    for i in range(total_chars):
        # Advance to the next 3-byte word every 4 output characters.
        if i // 4 != round_num:
            round_num += 1
            long_int = get_long_int(round_num, long_str)
        slot = i % 4
        pieces.append(alphabet[(long_int & masks[slot]) >> shifts[slot]])
    return ''.join(pieces)
def get_long_int(round_num: int, long_str: str) -> int:
    """Pack 3 chars of *long_str* starting at round_num*3 into a 24-bit int.

    Positions past the end of the string contribute 0.
    """
    base = round_num * 3
    length = len(long_str)
    codes = [
        ord(long_str[base + offset]) if base + offset < length else 0
        for offset in range(3)
    ]
    return (codes[0] << 16) | (codes[1] << 8) | codes[2]
def gener_random(random_num: int, option: list[int]) -> list[int]:
    """Merge the low 16 bits of *random_num* with two option bytes.

    Even-position bits (mask 0xAA) of each random byte are combined with the
    odd-position bits (mask 0x55) of the matching option byte, and vice
    versa, yielding four bytes.
    """
    low = random_num & 255
    high = (random_num >> 8) & 255
    even_mask, odd_mask = 170, 85
    return [
        (low & even_mask) | (option[0] & odd_mask),
        (low & odd_mask) | (option[0] & even_mask),
        (high & even_mask) | (option[1] & odd_mask),
        (high & odd_mask) | (option[1] & even_mask),
    ]
def generate_random_str() -> str:
    """
    Return the fixed pseudo-random 12-character string.

    Uses the same hard-coded "random" seeds as the JS implementation, so the
    output is deterministic.
    """
    # (seed, option) pairs, identical to the JS version's fixed values.
    seeds_and_options = (
        (0.123456789, [3, 45]),
        (0.987654321, [1, 0]),
        (0.555555555, [1, 5]),
    )
    out_bytes = []
    for seed, option in seeds_and_options:
        out_bytes.extend(gener_random(int(seed * 10000), option))
    return ''.join(map(chr, out_bytes))
def generate_rc4_bb_str(url_search_params: str, user_agent: str, window_env_str: str,
                        suffix: str = "cus", arguments: list[int] | None = None) -> str:
    """Build the RC4-encrypted "bb" payload used by Douyin's a_bogus signature.

    Mirrors the browser-side JS: a sparse map ``b`` of indexed values is
    assembled from timestamps, fixed config values and hash-digest bytes,
    flattened into a byte list in a fixed shuffle order, and RC4-encrypted.

    Args:
        url_search_params: Query string of the request being signed.
        user_agent: User-Agent header value of the request.
        window_env_str: Pipe-joined fake browser window metrics.
        suffix: Salt appended before hashing (default "cus").
        arguments: Three fixed integers baked into the payload.

    Returns:
        The RC4-encrypted payload as a string of raw byte code points.
    """
    if arguments is None:
        arguments = [0, 1, 14]
    sm3 = SM3()
    start_time = int(time.time() * 1000)
    # Three digest inputs:
    # 1: double SM3 over the query string + suffix
    url_search_params_list = sm3.sum(sm3.sum(url_search_params + suffix))
    # 2: double SM3 over the suffix alone
    cus = sm3.sum(sm3.sum(suffix))
    # 3: SM3 over the RC4-encrypted, re-encoded User-Agent
    ua_key = chr(0) + chr(1) + chr(14)  # [1/256, 1, 14]
    ua = sm3.sum(result_encrypt(
        rc4_encrypt(user_agent, ua_key),
        "s3"
    ))
    end_time = start_time + 100
    # Configuration object mirroring the JS-side "b" map
    b = {
        8: 3,
        10: end_time,
        15: {
            "aid": 6383,
            "pageId": 110624,
            "boe": False,
            "ddrt": 7,
            "paths": {
                "include": [{} for _ in range(7)],
                "exclude": []
            },
            "track": {
                "mode": 0,
                "delay": 300,
                "paths": []
            },
            "dump": True,
            "rpU": "hwj"
        },
        16: start_time,
        18: 44,
        19: [1, 0, 1, 5],
    }

    def split_to_bytes(num: int) -> list[int]:
        # Big-endian decomposition of a 32-bit value into four bytes.
        return [
            (num >> 24) & 255,
            (num >> 16) & 255,
            (num >> 8) & 255,
            num & 255
        ]

    # Start timestamp: low 32 bits as four bytes plus two overflow bytes
    start_time_bytes = split_to_bytes(b[16])
    b[20] = start_time_bytes[0]
    b[21] = start_time_bytes[1]
    b[22] = start_time_bytes[2]
    b[23] = start_time_bytes[3]
    b[24] = int(b[16] / 256 / 256 / 256 / 256) & 255
    b[25] = int(b[16] / 256 / 256 / 256 / 256 / 256) & 255
    # The three fixed "arguments" values, in several byte layouts
    arg0_bytes = split_to_bytes(arguments[0])
    b[26] = arg0_bytes[0]
    b[27] = arg0_bytes[1]
    b[28] = arg0_bytes[2]
    b[29] = arg0_bytes[3]
    b[30] = int(arguments[1] / 256) & 255
    b[31] = (arguments[1] % 256) & 255
    arg1_bytes = split_to_bytes(arguments[1])
    b[32] = arg1_bytes[0]
    b[33] = arg1_bytes[1]
    arg2_bytes = split_to_bytes(arguments[2])
    b[34] = arg2_bytes[0]
    b[35] = arg2_bytes[1]
    b[36] = arg2_bytes[2]
    b[37] = arg2_bytes[3]
    # Selected digest bytes from the three hashes computed above
    b[38] = url_search_params_list[21]
    b[39] = url_search_params_list[22]
    b[40] = cus[21]
    b[41] = cus[22]
    b[42] = ua[23]
    b[43] = ua[24]
    # End timestamp, same layout as the start timestamp
    end_time_bytes = split_to_bytes(b[10])
    b[44] = end_time_bytes[0]
    b[45] = end_time_bytes[1]
    b[46] = end_time_bytes[2]
    b[47] = end_time_bytes[3]
    b[48] = b[8]
    b[49] = int(b[10] / 256 / 256 / 256 / 256) & 255
    b[50] = int(b[10] / 256 / 256 / 256 / 256 / 256) & 255
    # Page/application identifiers. b[51] and b[56] hold the raw integers and
    # are never emitted below — only their byte decompositions are.
    b[51] = b[15]['pageId']
    page_id_bytes = split_to_bytes(b[15]['pageId'])
    b[52] = page_id_bytes[0]
    b[53] = page_id_bytes[1]
    b[54] = page_id_bytes[2]
    b[55] = page_id_bytes[3]
    b[56] = b[15]['aid']
    b[57] = b[15]['aid'] & 255
    b[58] = (b[15]['aid'] >> 8) & 255
    b[59] = (b[15]['aid'] >> 16) & 255
    b[60] = (b[15]['aid'] >> 24) & 255
    # Browser environment string, length-prefixed (little-endian 16-bit)
    window_env_list = [ord(char) for char in window_env_str]
    b[64] = len(window_env_list)
    b[65] = b[64] & 255
    b[66] = (b[64] >> 8) & 255
    b[69] = 0
    b[70] = 0
    b[71] = 0
    # XOR checksum over the emitted bytes (note: b[34] appears in bb below
    # but is deliberately excluded here, matching the JS reference)
    b[72] = b[18] ^ b[20] ^ b[26] ^ b[30] ^ b[38] ^ b[40] ^ b[42] ^ b[21] ^ b[27] ^ b[31] ^ \
            b[35] ^ b[39] ^ b[41] ^ b[43] ^ b[22] ^ b[28] ^ b[32] ^ b[36] ^ b[23] ^ b[29] ^ \
            b[33] ^ b[37] ^ b[44] ^ b[45] ^ b[46] ^ b[47] ^ b[48] ^ b[49] ^ b[50] ^ b[24] ^ \
            b[25] ^ b[52] ^ b[53] ^ b[54] ^ b[55] ^ b[57] ^ b[58] ^ b[59] ^ b[60] ^ b[65] ^ \
            b[66] ^ b[70] ^ b[71]
    # Flatten into the fixed shuffle order the server expects
    bb = [
        b[18], b[20], b[52], b[26], b[30], b[34], b[58], b[38], b[40], b[53], b[42], b[21],
        b[27], b[54], b[55], b[31], b[35], b[57], b[39], b[41], b[43], b[22], b[28], b[32],
        b[60], b[36], b[23], b[29], b[33], b[37], b[44], b[45], b[59], b[46], b[47], b[48],
        b[49], b[50], b[24], b[25], b[65], b[66], b[70], b[71]
    ]
    bb.extend(window_env_list)
    bb.append(b[72])
    # Final RC4 pass with the single-byte key 'y' (0x79)
    return rc4_encrypt(
        ''.join(chr(byte) for byte in bb),
        chr(121)
    )
def ab_sign(url_search_params: str, user_agent: str) -> str:
    """Produce the final a_bogus signature for a Douyin request.

    Steps: deterministic random prefix + RC4-encrypted payload, then a final
    result_encrypt pass with the "s4" alphabet and a trailing '='.

    Args:
        url_search_params: Query string of the request being signed.
        user_agent: User-Agent header value of the request.

    Returns:
        The signature string, always ending with '='.
    """
    window_env_str = "1920|1080|1920|1040|0|30|0|0|1872|92|1920|1040|1857|92|1|24|Win32"
    prefix = generate_random_str()
    payload = generate_rc4_bb_str(url_search_params, user_agent, window_env_str)
    return result_encrypt(prefix + payload, "s4") + "="

View File

View File

@ -0,0 +1,59 @@
# -*- coding: utf-8 -*-
import httpx
from typing import Dict, Any
from .. import utils
OptionalStr = str | None
OptionalDict = Dict[str, Any] | None
async def async_req(
    url: str,
    proxy_addr: OptionalStr = None,
    headers: OptionalDict = None,
    data: dict | bytes | None = None,
    json_data: dict | list | None = None,
    timeout: int = 20,
    redirect_url: bool = False,
    return_cookies: bool = False,
    include_cookies: bool = False,
    abroad: bool = False,
    content_conding: str = 'utf-8',
    verify: bool = False,
    http2: bool = True
) -> OptionalDict | OptionalStr | tuple:
    """Perform a single async HTTP request and return the response body.

    Issues a POST when `data` or `json_data` is given, otherwise a GET
    (GET follows redirects; the POST branch does not pass follow_redirects).

    Returns:
        - str(final URL) when `redirect_url` is True;
        - the cookie dict (or (text, cookie dict) when `include_cookies`)
          when `return_cookies` is True;
        - otherwise the response text.
        On any exception the exception's string is returned instead of
        raising — callers must be prepared for error text.

    NOTE(review): `abroad` and `content_conding` are accepted for signature
    compatibility with the sync helper but are never used here.
    """
    if headers is None:
        headers = {}
    try:
        proxy_addr = utils.handle_proxy_addr(proxy_addr)
        if data or json_data:
            async with httpx.AsyncClient(proxy=proxy_addr, timeout=timeout, verify=verify, http2=http2) as client:
                response = await client.post(url, data=data, json=json_data, headers=headers)
        else:
            async with httpx.AsyncClient(proxy=proxy_addr, timeout=timeout, verify=verify, http2=http2) as client:
                response = await client.get(url, headers=headers, follow_redirects=True)
        if redirect_url:
            return str(response.url)
        elif return_cookies:
            cookies_dict = {name: value for name, value in response.cookies.items()}
            return (response.text, cookies_dict) if include_cookies else cookies_dict
        else:
            resp_str = response.text
    except Exception as e:
        # Deliberate best-effort behavior: swallow the error and hand the
        # message back as the "response" text.
        resp_str = str(e)
    return resp_str
async def get_response_status(url: str, proxy_addr: OptionalStr = None, headers: OptionalDict = None,
                              timeout: int = 10, abroad: bool = False, verify: bool = False, http2=False) -> bool:
    """Return True if a HEAD request to *url* (following redirects) answers 200.

    Args:
        url: Target URL to probe.
        proxy_addr: Optional proxy address, normalized via utils.handle_proxy_addr.
        headers: Optional request headers.
        timeout: Request timeout in seconds.
        abroad: Unused here; kept for signature compatibility with the sync helpers.
        verify: TLS certificate verification flag passed to httpx.
        http2: Enable HTTP/2 on the client.

    Returns:
        True only when the final response status code is exactly 200; any
        exception (network error, proxy failure, invalid URL) yields False.
    """
    try:
        proxy_addr = utils.handle_proxy_addr(proxy_addr)
        # Bug fix: forward http2 to the client — the parameter was accepted
        # but previously never used.
        async with httpx.AsyncClient(proxy=proxy_addr, timeout=timeout, verify=verify, http2=http2) as client:
            response = await client.head(url, headers=headers, follow_redirects=True)
            return response.status_code == 200
    except Exception as e:
        print(e)
        return False

View File

@ -0,0 +1,88 @@
# -*- coding: utf-8 -*-
import gzip
import urllib.parse
import urllib.error
import requests
import ssl
import json
import urllib.request
# Opener that deliberately bypasses any system-level proxy settings; used for
# domestic (non-abroad) requests in sync_req below.
no_proxy_handler = urllib.request.ProxyHandler({})
opener = urllib.request.build_opener(no_proxy_handler)
# Relaxed SSL context: hostname checking and certificate verification disabled.
# NOTE(review): this context is not referenced by sync_req in this file —
# presumably consumed elsewhere; confirm before removing.
ssl_context = ssl.create_default_context()
ssl_context.check_hostname = False
ssl_context.verify_mode = ssl.CERT_NONE
# Type aliases shared by the helpers in this module.
OptionalStr = str | None
OptionalDict = dict | None
def sync_req(
    url: str,
    proxy_addr: OptionalStr = None,
    headers: OptionalDict = None,
    data: dict | bytes | None = None,
    json_data: dict | list | None = None,
    timeout: int = 20,
    redirect_url: bool = False,
    abroad: bool = False,
    content_conding: str = 'utf-8'
) -> str:
    """Perform a blocking HTTP request and return the response body text.

    Two transports are used:
      - with a proxy: `requests` (POST when data/json_data given, else GET);
      - without a proxy: `urllib` — through the module-level no-proxy opener,
        or a plain urlopen when `abroad` is True.

    Returns:
        The final URL when `redirect_url` is True, otherwise the decoded
        response body. On failure the exception's string is returned instead
        of raising — callers must be prepared for error text.
    """
    if headers is None:
        headers = {}
    try:
        if proxy_addr:
            proxies = {
                'http': proxy_addr,
                'https': proxy_addr
            }
            if data or json_data:
                response = requests.post(
                    url, data=data, json=json_data, headers=headers, proxies=proxies, timeout=timeout
                )
            else:
                response = requests.get(url, headers=headers, proxies=proxies, timeout=timeout)
            if redirect_url:
                return response.url
            resp_str = response.text
        else:
            # urllib path: encode the payload by hand (json_data wins when
            # both data and json_data are supplied).
            if data and not isinstance(data, bytes):
                data = urllib.parse.urlencode(data).encode(content_conding)
            if json_data and isinstance(json_data, (dict, list)):
                data = json.dumps(json_data).encode(content_conding)
            req = urllib.request.Request(url, data=data, headers=headers)
            try:
                if abroad:
                    response = urllib.request.urlopen(req, timeout=timeout)
                else:
                    response = opener.open(req, timeout=timeout)
                if redirect_url:
                    return response.url
                content_encoding = response.info().get('Content-Encoding')
                try:
                    if content_encoding == 'gzip':
                        with gzip.open(response, 'rt', encoding=content_conding) as gzipped:
                            resp_str = gzipped.read()
                    else:
                        resp_str = response.read().decode(content_conding)
                finally:
                    response.close()
            except urllib.error.HTTPError as e:
                # A 400 still carries a useful body; everything else bubbles
                # up to the outer handler.
                if e.code == 400:
                    resp_str = e.read().decode(content_conding)
                else:
                    raise
            except urllib.error.URLError as e:
                print(f"URL Error: {e}")
                raise
            except Exception as e:
                print(f"An error occurred: {e}")
                raise
    except Exception as e:
        # Deliberate best-effort behavior: return the error message as text.
        resp_str = str(e)
    return resp_str

View File

@ -23,13 +23,16 @@ execute_dir = os.path.split(os.path.realpath(sys.argv[0]))[0]
current_env_path = os.environ.get('PATH')
def unzip_file(zip_path: str | Path, extract_to: str | Path) -> None:
def unzip_file(zip_path: str | Path, extract_to: str | Path, delete: bool = True) -> None:
if not os.path.exists(extract_to):
os.makedirs(extract_to)
with zipfile.ZipFile(zip_path, 'r') as zip_ref:
zip_ref.extractall(extract_to)
if delete and os.path.exists(zip_path):
os.remove(zip_path)
def install_nodejs_windows():
try:
@ -148,14 +151,22 @@ def install_nodejs_mac():
logger.error(f"An unexpected error occurred: {e}")
def get_package_manager():
dist_id = distro.id()
if dist_id in ["centos", "fedora", "rhel", "amzn", "oracle", "scientific", "opencloudos", "alinux"]:
return "RHS"
else:
return "DBS"
def install_nodejs() -> bool:
if current_platform == "Windows":
return install_nodejs_windows()
elif current_platform == "Linux":
dist = distro.id()
if dist.lower() == "centos":
os_type = get_package_manager()
if os_type == "RHS":
return install_nodejs_centos()
elif dist.lower() == "ubuntu":
else:
return install_nodejs_ubuntu()
elif current_platform == "Darwin":
return install_nodejs_mac()
@ -206,4 +217,4 @@ def check_nodejs_installed() -> bool:
def check_node() -> bool:
if not check_nodejs_installed():
return install_nodejs()
return install_nodejs()

33
src/javascript/laixiu.js Normal file
View File

@ -0,0 +1,33 @@
function generateUUID() {
    // RFC 4122 version-4 style UUID built from Math.random (not
    // cryptographically secure; used only as a device identifier).
    const template = 'xxxxxxxx-xxxx-4xxx-yxxx-xxxxxxxxxxxx';
    return template.replace(/[xy]/g, function (ch) {
        const rand = Math.random() * 16 | 0;
        const nibble = ch === 'x' ? rand : (rand & 0x3 | 0x8);
        return nibble.toString(16);
    });
}
function calculateSign() {
    // Millisecond timestamp plus a dash-less UUID form the per-request nonce.
    const timestamp = new Date().getTime();
    const imei = generateUUID().replace(/-/g, "");
    // App-specific salt appended before hashing.
    const salt = 'kk792f28d6ff1f34ec702c08626d454b39pro';
    const payload = "web" + imei + timestamp + salt;
    // CryptoJS is installed as a global by sign() before this is called.
    const requestId = CryptoJS.MD5(payload).toString();
    return {
        timestamp: timestamp,
        imei: imei,
        requestId: requestId,
        inputString: payload
    };
}
// Entry point invoked from Python (via execjs): loads crypto-js from the
// supplied path and returns the signing fields for a laixiu API request.
// NOTE: CryptoJS is assigned without var/let/const on purpose — it becomes
// a global that calculateSign() reads.
function sign(cryptoJSPath) {
    CryptoJS = require(cryptoJSPath);
    return calculateSign();
}

module.exports = {
    sign
};

143
src/javascript/migu.js Normal file
View File

@ -0,0 +1,143 @@
/**
 * Function to get the ddCalcu parameter value
 * @param {string} inputUrl - The original URL before encryption
 * @returns {Promise<string>} - Returns the calculated ddCalcu value
 *
 * Downloads Migu's signing WASM module and drives its exported interface to
 * compute the ddCalcu signature for the given play URL.
 */
async function getDdCalcu(inputUrl) {
    let wasmInstance = null;
    let memory_p = null; // Uint8Array view
    let memory_h = null; // Uint32Array view
    // Fixed parameter
    const f = 'PBTxuWiTEbUPPFcpyxs0ww==';
    // Utility function: Convert string to UTF-8 in memory
    function stringToUTF8(string, offset) {
        const encoder = new TextEncoder();
        const encoded = encoder.encode(string);
        for (let i = 0; i < encoded.length; i++) {
            memory_p[offset + i] = encoded[i];
        }
        memory_p[offset + encoded.length] = 0; // Null-terminate
    }
    // Utility function: Read UTF-8 string from memory address
    // NOTE(review): reads bytes until NUL as single code units — not a full
    // UTF-8 decode; adequate only if the output is ASCII. Verify.
    function UTF8ToString(offset) {
        let s = '';
        let i = 0;
        while (memory_p[offset + i]) {
            s += String.fromCharCode(memory_p[offset + i]);
            i++;
        }
        return s;
    }
    // WASM import function stubs
    // a: sums the iovec lengths into *n and returns 0 — appears to be a
    // WASI fd_write-style stub; confirm against the wasm's import section.
    function a(e, t, r, n) {
        let s = 0;
        for (let i = 0; i < r; i++) {
            const d = memory_h[t + 4 >> 2];
            t += 8;
            s += d;
        }
        memory_h[n >> 2] = s;
        return 0;
    }
    function b() {}
    function c() {}
    // Step 1: Retrieve playerVersion
    const settingsResp = await fetch('https://app-sc.miguvideo.com/common/v1/settings/H5_DetailPage');
    const settingsData = await settingsResp.json();
    const playerVersion = JSON.parse(settingsData.body.paramValue).playerVersion;
    // Step 2: Load WASM module
    const wasmUrl = `https://www.miguvideo.com/mgs/player/prd/${playerVersion}/dist/mgprtcl.wasm`;
    const wasmResp = await fetch(wasmUrl);
    if (!wasmResp.ok) throw new Error("Failed to download WASM");
    const wasmBuffer = await wasmResp.arrayBuffer();
    const importObject = {
        a: { a, b, c }
    };
    const { instance } = await WebAssembly.instantiate(wasmBuffer, importObject);
    wasmInstance = instance;
    // Export "d" is the module's linear memory; the single-letter export
    // names below come from the minified wasm glue.
    const memory = wasmInstance.exports.d;
    memory_p = new Uint8Array(memory.buffer);
    memory_h = new Uint32Array(memory.buffer);
    const exports = {
        CallInterface1: wasmInstance.exports.h,
        CallInterface2: wasmInstance.exports.i,
        CallInterface3: wasmInstance.exports.j,
        CallInterface4: wasmInstance.exports.k,
        CallInterface6: wasmInstance.exports.m,
        CallInterface7: wasmInstance.exports.n,
        CallInterface8: wasmInstance.exports.o,
        CallInterface9: wasmInstance.exports.p,
        CallInterface10: wasmInstance.exports.q,
        CallInterface11: wasmInstance.exports.r,
        CallInterface14: wasmInstance.exports.t,
        malloc: wasmInstance.exports.u,
    };
    // Pull the signed query parameters out of the input URL
    const parsedUrl = new URL(inputUrl);
    const query = Object.fromEntries(parsedUrl.searchParams);
    const o = query.userid || '';
    const a_val = query.timestamp || '';
    const s = query.ProgramID || '';
    const u = query.Channel_ID || '';
    const v = query.puData || '';
    // Allocate memory
    const d = exports.malloc(o.length + 1);
    const h = exports.malloc(a_val.length + 1);
    const y = exports.malloc(s.length + 1);
    const m = exports.malloc(u.length + 1);
    const g = exports.malloc(v.length + 1);
    const b_val = exports.malloc(f.length + 1);
    const E = exports.malloc(128);
    const T = exports.malloc(128);
    // Write data to memory
    stringToUTF8(o, d);
    stringToUTF8(a_val, h);
    stringToUTF8(s, y);
    stringToUTF8(u, m);
    stringToUTF8(v, g);
    stringToUTF8(f, b_val);
    // Call interface functions (this call order matters — it mirrors the
    // player's own glue code)
    const S = exports.CallInterface6(); // Create context
    exports.CallInterface1(S, y, s.length);
    exports.CallInterface10(S, h, a_val.length);
    exports.CallInterface9(S, d, o.length);
    exports.CallInterface3(S, 0, 0);
    exports.CallInterface11(S, 0, 0);
    exports.CallInterface8(S, g, v.length);
    exports.CallInterface2(S, m, u.length);
    exports.CallInterface14(S, b_val, f.length, T, 128);
    // Intermediate digest at T is fed back in before the final result at E
    const w = UTF8ToString(T);
    const I = exports.malloc(w.length + 1);
    stringToUTF8(w, I);
    exports.CallInterface7(S, I, w.length);
    exports.CallInterface4(S, E, 128);
    return UTF8ToString(E);
}
// CLI entry: node migu.js <url> — prints the ddCalcu value or exits 1.
const url = process.argv[2];
getDdCalcu(url).then(result => {
    console.log(result);
}).catch(err => {
    console.error(err);
    process.exit(1);
});

View File

@ -0,0 +1,78 @@
// Self-contained (minified) MD5 implementation: sign(e) returns the lowercase
// hex MD5 digest of string e. Recognizable from the standard MD5 initial
// values (1732584193 = 0x67452301, ...) and the sine-table constants below.
function sign(e) {
    // t: rotate 32-bit value e left by t bits
    function t(e, t) {
        return e << t | e >>> 32 - t
    }
    // o: 32-bit addition with explicit carry/overflow emulation
    function o(e, t) {
        var o, n, r, i, a;
        return r = 2147483648 & e,
        i = 2147483648 & t,
        a = (1073741823 & e) + (1073741823 & t),
        (o = 1073741824 & e) & (n = 1073741824 & t) ? 2147483648 ^ a ^ r ^ i : o | n ? 1073741824 & a ? 3221225472 ^ a ^ r ^ i : 1073741824 ^ a ^ r ^ i : a ^ r ^ i
    }
    // n/r/i/a: the four MD5 round operations (FF, GG, HH, II)
    function n(e, n, r, i, a, s, u) {
        return o(t(e = o(e, o(o(function(e, t, o) {
            return e & t | ~e & o
        }(n, r, i), a), u)), s), n)
    }
    function r(e, n, r, i, a, s, u) {
        return o(t(e = o(e, o(o(function(e, t, o) {
            return e & o | t & ~o
        }(n, r, i), a), u)), s), n)
    }
    function i(e, n, r, i, a, s, u) {
        return o(t(e = o(e, o(o(function(e, t, o) {
            return e ^ t ^ o
        }(n, r, i), a), u)), s), n)
    }
    function a(e, n, r, i, a, s, u) {
        return o(t(e = o(e, o(o(function(e, t, o) {
            return t ^ (e | ~o)
        }(n, r, i), a), u)), s), n)
    }
    // s: format a 32-bit word as 8 hex chars, least-significant byte first
    function s(e) {
        var t, o = "", n = "";
        for (t = 0; 3 >= t; t++)
            o += (n = "0" + (e >>> 8 * t & 255).toString(16)).substr(n.length - 2, 2);
        return o
    }
    var u, l, d, c, p, f, h, m, y, g;
    // g: UTF-8-encode the input (inner function), then pack it into padded
    // 16-word blocks with the 0x80 terminator and the bit length appended.
    for (g = function(e) {
        for (var t = e.length, o = t + 8, n = 16 * ((o - o % 64) / 64 + 1), r = Array(n - 1), i = 0, a = 0; t > a; )
            i = a % 4 * 8,
            r[(a - a % 4) / 4] |= e.charCodeAt(a) << i,
            a++;
        return i = a % 4 * 8,
        r[(a - a % 4) / 4] |= 128 << i,
        r[n - 2] = t << 3,
        r[n - 1] = t >>> 29,
        r
    }(e = function(e) {
        var t = String.fromCharCode;
        e = e.replace(/\r\n/g, "\n");
        for (var o, n = "", r = 0; r < e.length; r++)
            128 > (o = e.charCodeAt(r)) ? n += t(o) : o > 127 && 2048 > o ? (n += t(o >> 6 | 192),
            n += t(63 & o | 128)) : (n += t(o >> 12 | 224),
            n += t(o >> 6 & 63 | 128),
            n += t(63 & o | 128));
        return n
    }(e)),
    // f/h/m/y are the MD5 state registers A/B/C/D; process one 16-word
    // block per iteration (4 rounds of 16 operations each, fully unrolled)
    f = 1732584193,
    h = 4023233417,
    m = 2562383102,
    y = 271733878,
    u = 0; u < g.length; u += 16)
        l = f,
        d = h,
        c = m,
        p = y,
        h = a(h = a(h = a(h = a(h = i(h = i(h = i(h = i(h = r(h = r(h = r(h = r(h = n(h = n(h = n(h = n(h, m = n(m, y = n(y, f = n(f, h, m, y, g[u + 0], 7, 3614090360), h, m, g[u + 1], 12, 3905402710), f, h, g[u + 2], 17, 606105819), y, f, g[u + 3], 22, 3250441966), m = n(m, y = n(y, f = n(f, h, m, y, g[u + 4], 7, 4118548399), h, m, g[u + 5], 12, 1200080426), f, h, g[u + 6], 17, 2821735955), y, f, g[u + 7], 22, 4249261313), m = n(m, y = n(y, f = n(f, h, m, y, g[u + 8], 7, 1770035416), h, m, g[u + 9], 12, 2336552879), f, h, g[u + 10], 17, 4294925233), y, f, g[u + 11], 22, 2304563134), m = n(m, y = n(y, f = n(f, h, m, y, g[u + 12], 7, 1804603682), h, m, g[u + 13], 12, 4254626195), f, h, g[u + 14], 17, 2792965006), y, f, g[u + 15], 22, 1236535329), m = r(m, y = r(y, f = r(f, h, m, y, g[u + 1], 5, 4129170786), h, m, g[u + 6], 9, 3225465664), f, h, g[u + 11], 14, 643717713), y, f, g[u + 0], 20, 3921069994), m = r(m, y = r(y, f = r(f, h, m, y, g[u + 5], 5, 3593408605), h, m, g[u + 10], 9, 38016083), f, h, g[u + 15], 14, 3634488961), y, f, g[u + 4], 20, 3889429448), m = r(m, y = r(y, f = r(f, h, m, y, g[u + 9], 5, 568446438), h, m, g[u + 14], 9, 3275163606), f, h, g[u + 3], 14, 4107603335), y, f, g[u + 8], 20, 1163531501), m = r(m, y = r(y, f = r(f, h, m, y, g[u + 13], 5, 2850285829), h, m, g[u + 2], 9, 4243563512), f, h, g[u + 7], 14, 1735328473), y, f, g[u + 12], 20, 2368359562), m = i(m, y = i(y, f = i(f, h, m, y, g[u + 5], 4, 4294588738), h, m, g[u + 8], 11, 2272392833), f, h, g[u + 11], 16, 1839030562), y, f, g[u + 14], 23, 4259657740), m = i(m, y = i(y, f = i(f, h, m, y, g[u + 1], 4, 2763975236), h, m, g[u + 4], 11, 1272893353), f, h, g[u + 7], 16, 4139469664), y, f, g[u + 10], 23, 3200236656), m = i(m, y = i(y, f = i(f, h, m, y, g[u + 13], 4, 681279174), h, m, g[u + 0], 11, 3936430074), f, h, g[u + 3], 16, 3572445317), y, f, g[u + 6], 23, 76029189), m = i(m, y = i(y, f = i(f, h, m, y, g[u + 9], 4, 3654602809), h, m, g[u + 12], 11, 3873151461), f, h, g[u + 15], 16, 
        530742520), y, f, g[u + 2], 23, 3299628645), m = a(m, y = a(y, f = a(f, h, m, y, g[u + 0], 6, 4096336452), h, m, g[u + 7], 10, 1126891415), f, h, g[u + 14], 15, 2878612391), y, f, g[u + 5], 21, 4237533241), m = a(m, y = a(y, f = a(f, h, m, y, g[u + 12], 6, 1700485571), h, m, g[u + 3], 10, 2399980690), f, h, g[u + 10], 15, 4293915773), y, f, g[u + 1], 21, 2240044497), m = a(m, y = a(y, f = a(f, h, m, y, g[u + 8], 6, 1873313359), h, m, g[u + 15], 10, 4264355552), f, h, g[u + 6], 15, 2734768916), y, f, g[u + 13], 21, 1309151649), m = a(m, y = a(y, f = a(f, h, m, y, g[u + 4], 6, 4149444226), h, m, g[u + 11], 10, 3174756917), f, h, g[u + 2], 15, 718787259), y, f, g[u + 9], 21, 3951481745),
        f = o(f, l),
        h = o(h, d),
        m = o(m, c),
        y = o(y, p);
    return (s(f) + s(h) + s(m) + s(y)).toLowerCase()
}
// Expected sign value: 05748e8359cd3e6deaab02d15caafc11
// var sg =sign('5655b7041ca049730330701082886efd&1719411639403&12574478&{"componentKey":"wp_pc_shop_basic_info","params":"{\\"memberId\\":\\"b2b-22133374292418351a\\"}"}')
// console.log(sg)

View File

@ -2,22 +2,31 @@
import os
import sys
from loguru import logger
logger.remove()
custom_format = "<green>{time:YYYY-MM-DD HH:mm:ss.SSS}</green> | <level>{level: <8}</level> - <level>{message}</level>"
os.environ["LOGURU_FORMAT"] = custom_format
from loguru import logger
logger.add(
sink=sys.stderr,
format=custom_format,
level="DEBUG",
colorize=True,
enqueue=True
)
script_path = os.path.split(os.path.realpath(sys.argv[0]))[0]
logger.add(
f"{script_path}/logs/DouyinLiveRecorder.log",
f"{script_path}/logs/streamget.log",
level="DEBUG",
format="{time:YYYY-MM-DD HH:mm:ss.SSS} | {level: <8} | {name}:{function}:{line} - {message}",
filter=lambda i: i["level"].name == "DEBUG",
filter=lambda i: i["level"].name != "INFO",
serialize=False,
enqueue=True,
retention=1,
rotation="100 KB",
rotation="300 KB",
encoding='utf-8'
)
@ -32,14 +41,3 @@ logger.add(
rotation="300 KB",
encoding='utf-8'
)
logger.add(
f"{script_path}/logs/DouyinLiveRecorder.log",
level="WARNING",
format="{time:YYYY-MM-DD HH:mm:ss.SSS} | {level: <8} | {name}:{function}:{line} - {message}",
serialize=False,
enqueue=True,
retention=1,
rotation="100 KB",
encoding='utf-8'
)

92
src/proxy.py Normal file
View File

@ -0,0 +1,92 @@
import os
import sys
from enum import Enum, auto
from dataclasses import dataclass, field
from .utils import logger
class ProxyType(Enum):
    """Kinds of proxy protocols the detector can describe.

    Values are auto-generated ordinals; the member order is significant
    because it determines those values.
    """
    # NOTE(review): members are not referenced in this file — presumably
    # consumed by callers elsewhere; confirm before removing.
    HTTP = auto()
    HTTPS = auto()
    SOCKS = auto()
@dataclass(frozen=True)
class ProxyInfo:
    """Immutable ip/port pair describing a proxy endpoint.

    Either both fields are non-empty or both are empty ("no proxy").
    When set, the port must be a decimal string in the TCP port range.
    """
    ip: str = field(default="", repr=True)
    port: str = field(default="", repr=True)

    def __post_init__(self):
        # ip and port must be set (or left empty) together.
        if bool(self.ip) != bool(self.port):
            raise ValueError("IP or port cannot be empty")
        if self.ip and self.port:
            port_ok = self.port.isdigit() and 1 <= int(self.port) <= 65535
            if not port_ok:
                raise ValueError("Port must be a digit between 1 and 65535")
class ProxyDetector:
def __init__(self):
if sys.platform.startswith('win'):
import winreg
self.winreg = winreg
self.__path = r'Software\Microsoft\Windows\CurrentVersion\Internet Settings'
with winreg.ConnectRegistry(None, winreg.HKEY_CURRENT_USER) as key_user:
self.__INTERNET_SETTINGS = winreg.OpenKeyEx(key_user, self.__path, 0, winreg.KEY_ALL_ACCESS)
else:
self.__is_windows = False
def get_proxy_info(self) -> ProxyInfo:
if sys.platform.startswith('win'):
ip, port = self._get_proxy_info_windows()
else:
ip, port = self._get_proxy_info_linux()
return ProxyInfo(ip, port)
def is_proxy_enabled(self) -> bool:
if sys.platform.startswith('win'):
return self._is_proxy_enabled_windows()
else:
return self._is_proxy_enabled_linux()
def _get_proxy_info_windows(self) -> tuple[str, str]:
ip, port = "", ""
if self._is_proxy_enabled_windows():
try:
ip_port = self.winreg.QueryValueEx(self.__INTERNET_SETTINGS, "ProxyServer")[0]
if ip_port:
ip, port = ip_port.split(":")
except FileNotFoundError as err:
logger.warning("No proxy information found: " + str(err))
except Exception as err:
logger.error("An error occurred: " + str(err))
else:
logger.debug("No proxy is enabled on the system")
return ip, port
def _is_proxy_enabled_windows(self) -> bool:
try:
if self.winreg.QueryValueEx(self.__INTERNET_SETTINGS, "ProxyEnable")[0] == 1:
return True
except FileNotFoundError as err:
print("No proxy information found: " + str(err))
except Exception as err:
print("An error occurred: " + str(err))
return False
@staticmethod
def _get_proxy_info_linux() -> tuple[str, str]:
proxies = {
'http': os.getenv('http_proxy'),
'https': os.getenv('https_proxy'),
'ftp': os.getenv('ftp_proxy')
}
ip = port = ""
for proto, proxy in proxies.items():
if proxy:
ip, port = proxy.split(':')
break
return ip, port
def _is_proxy_enabled_linux(self) -> bool:
proxies = self._get_proxy_info_linux()
return any(proxy != '' for proxy in proxies)

150
src/room.py Normal file
View File

@ -0,0 +1,150 @@
# -*- encoding: utf-8 -*-
"""
Author: Hmily
GitHub:https://github.com/ihmily
Date: 2023-07-17 23:52:05
Update: 2025-02-04 04:57:00
Copyright (c) 2023 by Hmily, All Rights Reserved.
"""
import re
import urllib.parse
import execjs
import httpx
import urllib.request
from . import JS_SCRIPT_PATH, utils
# urllib opener that deliberately ignores any system proxy configuration.
no_proxy_handler = urllib.request.ProxyHandler({})
opener = urllib.request.build_opener(no_proxy_handler)


class UnsupportedUrlError(Exception):
    """Raised when a Douyin URL redirects somewhere this module cannot parse."""
    pass


# Mobile (share-page) request headers; the cookie carries a verify token.
HEADERS = {
    'User-Agent': 'Mozilla/5.0 (Linux; Android 11; SAMSUNG SM-G973U) AppleWebKit/537.36 (KHTML, like Gecko) '
                  'SamsungBrowser/14.2 Chrome/87.0.4280.141 Mobile Safari/537.36',
    'Accept-Language': 'zh-CN,zh;q=0.8,zh-TW;q=0.7,zh-HK;q=0.5,en-US;q=0.3,en;q=0.2',
    'Cookie': 's_v_web_id=verify_lk07kv74_QZYCUApD_xhiB_405x_Ax51_GYO9bUIyZQVf'
}

# Desktop request headers with a pre-baked session/signature cookie.
HEADERS_PC = {
    'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) '
                  'Chrome/121.0.0.0 Safari/537.36 Edg/121.0.0.0',
    'Cookie': 'sessionid=7494ae59ae06784454373ce25761e864; __ac_nonce=0670497840077ee4c9eb2; '
              '__ac_signature=_02B4Z6wo00f012DZczQAAIDCJJBb3EjnINdg-XeAAL8-db; '
              's_v_web_id=verify_m1ztgtjj_vuHnMLZD_iwZ9_4YO4_BdN1_7wLP3pyqXsf2; '
}
# X-Bogus signature algorithm (delegates to the bundled x-bogus.js)
async def get_xbogus(url: str, headers: dict | None = None) -> str:
    """Compute the Douyin X-Bogus signature for *url*'s query string.

    Args:
        url: Full request URL; only its query string is signed.
        headers: Optional request headers; falls back to the module HEADERS
            when no User-Agent is present.

    Returns:
        The X-Bogus signature produced by the bundled JS implementation.
    """
    if not headers or 'user-agent' not in (k.lower() for k in headers):
        headers = HEADERS
    query = urllib.parse.urlparse(url).query
    # Bug fix: read the script with an explicit encoding and close the file
    # handle (the original open() was never closed).
    with open(f'{JS_SCRIPT_PATH}/x-bogus.js', encoding='utf-8') as f:
        js_code = f.read()
    xbogus = execjs.compile(js_code).call(
        'sign', query, headers.get("User-Agent", "user-agent"))
    return xbogus
# Resolve the room ID and the anchor's sec_user_id from a Douyin share link
async def get_sec_user_id(url: str, proxy_addr: str | None = None, headers: dict | None = None) -> tuple | None:
    """Follow a Douyin share URL and extract ``(room_id, sec_user_id)``.

    The short link is expected to redirect to a ``reflow/`` URL whose query
    string carries ``sec_user_id`` and whose last path segment is the room id.

    Raises:
        UnsupportedUrlError: when the redirect target is not a reflow URL.
        RuntimeError: on any other failure (network error, pattern miss).
    """
    if not headers or all(k.lower() not in ['user-agent', 'cookie'] for k in headers):
        headers = HEADERS
    try:
        proxy_addr = utils.handle_proxy_addr(proxy_addr)
        async with httpx.AsyncClient(proxy=proxy_addr, timeout=15) as client:
            response = await client.get(url, headers=headers, follow_redirects=True)
            redirect_url = response.url
            if 'reflow/' in str(redirect_url):
                match = re.search(r'sec_user_id=([\w_\-]+)&', str(redirect_url))
                if match:
                    sec_user_id = match.group(1)
                    # room id is the last path segment before the query string
                    room_id = str(redirect_url).split('?')[0].rsplit('/', maxsplit=1)[1]
                    return room_id, sec_user_id
                else:
                    raise RuntimeError("Could not find sec_user_id in the URL.")
            else:
                raise UnsupportedUrlError("The redirect URL does not contain 'reflow/'.")
    except UnsupportedUrlError as e:
        raise e
    except Exception as e:
        raise RuntimeError(f"An error occurred: {e}")
# Resolve the anchor's Douyin handle (unique_id) from a share link
async def get_unique_id(url: str, proxy_addr: str | None = None, headers: dict | None = None) -> str | None:
    """Follow a Douyin share URL and scrape the anchor's unique_id.

    The redirect's last path segment is taken as the sec_user_id, whose
    public share page is then fetched and parsed for ``unique_id``.

    Raises:
        UnsupportedUrlError: when the link redirects to a ``reflow/`` URL
            (that form is handled by get_sec_user_id instead).
        RuntimeError: on any other failure.
    """
    if not headers or all(k.lower() not in ['user-agent', 'cookie'] for k in headers):
        headers = HEADERS
    try:
        proxy_addr = utils.handle_proxy_addr(proxy_addr)
        async with httpx.AsyncClient(proxy=proxy_addr, timeout=15) as client:
            response = await client.get(url, headers=headers, follow_redirects=True)
            redirect_url = str(response.url)
            if 'reflow/' in str(redirect_url):
                raise UnsupportedUrlError("Unsupported URL")
            sec_user_id = redirect_url.split('?')[0].rsplit('/', maxsplit=1)[1]
            # Pre-baked ttwid/signature cookies required by the share-user page
            headers['Cookie'] = ('ttwid=1%7C4ejCkU2bKY76IySQENJwvGhg1IQZrgGEupSyTKKfuyk%7C1740470403%7Cbc9a'
                                 'd2ee341f1a162f9e27f4641778030d1ae91e31f9df6553a8f2efa3bdb7b4; __ac_nonce=06'
                                 '83e59f3009cc48fbab0; __ac_signature=_02B4Z6wo00f01mG6waQAAIDB9JUCzFb6.TZhmsU'
                                 'AAPBf34; __ac_referer=__ac_blank')
            user_page_response = await client.get(f'https://www.iesdouyin.com/share/user/{sec_user_id}',
                                                  headers=headers, follow_redirects=True)
            matches = re.findall(r'unique_id":"(.*?)","verification_type', user_page_response.text)
            if matches:
                # The last match is used; earlier ones can belong to other
                # users embedded in the page payload.
                unique_id = matches[-1]
                return unique_id
            else:
                raise RuntimeError("Could not find unique_id in the response.")
    except UnsupportedUrlError as e:
        raise e
    except Exception as e:
        raise RuntimeError(f"An error occurred: {e}")
# Resolve the public web room ID (web_rid) via the reflow info endpoint
async def get_live_room_id(room_id: str, sec_user_id: str, proxy_addr: str | None = None, params: dict | None = None,
                           headers: dict | None = None) -> str:
    """Query the webcast reflow API and return the room owner's ``web_rid``.

    Args:
        room_id: Internal room id (from get_sec_user_id).
        sec_user_id: Anchor's sec_user_id (from get_sec_user_id).
        proxy_addr: Optional proxy address.
        params: Optional override for the API query parameters.
        headers: Optional request headers (defaults to HEADERS).

    Raises:
        httpx.HTTPStatusError: on a non-2xx API response.
        Exception: any other failure is logged and re-raised.
    """
    if not headers or all(k.lower() not in ['user-agent', 'cookie'] for k in headers):
        headers = HEADERS
    if not params:
        params = {
            "verifyFp": "verify_lk07kv74_QZYCUApD_xhiB_405x_Ax51_GYO9bUIyZQVf",
            "type_id": "0",
            "live_id": "1",
            "room_id": room_id,
            "sec_user_id": sec_user_id,
            "app_id": "1128",
            "msToken": "wrqzbEaTlsxt52-vxyZo_mIoL0RjNi1ZdDe7gzEGMUTVh_HvmbLLkQrA_1HKVOa2C6gkxb6IiY6TY2z8enAkPEwGq--gM"
                       "-me3Yudck2ailla5Q4osnYIHxd9dI4WtQ==",
        }
    api = f'https://webcast.amemv.com/webcast/room/reflow/info/?{urllib.parse.urlencode(params)}'
    # The endpoint rejects unsigned requests: append an X-Bogus signature
    xbogus = await get_xbogus(api)
    api = api + "&X-Bogus=" + xbogus
    try:
        proxy_addr = utils.handle_proxy_addr(proxy_addr)
        async with httpx.AsyncClient(proxy=proxy_addr,
                                     timeout=15) as client:
            response = await client.get(api, headers=headers)
            response.raise_for_status()
            json_data = response.json()
            return json_data['data']['room']['owner']['web_rid']
    except httpx.HTTPStatusError as e:
        print(f"HTTP status error occurred: {e.response.status_code}")
        raise
    except Exception as e:
        print(f"An exception occurred during get_live_room_id: {e}")
        raise
if __name__ == '__main__':
    # Manual smoke test. Bug fix: get_sec_user_id / get_live_room_id are
    # coroutines — the original called them without awaiting, so unpacking
    # the returned coroutine object raised a TypeError at runtime. Drive
    # them through an event loop instead.
    import asyncio

    async def _demo() -> None:
        room_url = "https://v.douyin.com/iQLgKSj/"
        _room_id, sec_uid = await get_sec_user_id(room_url)
        web_rid = await get_live_room_id(_room_id, sec_uid)
        print("return web_rid:", web_rid)

    asyncio.run(_demo())

File diff suppressed because it is too large Load Diff

View File

@ -4,8 +4,8 @@
Author: Hmily
GitHub: https://github.com/ihmily
Date: 2023-07-15 23:15:00
Update: 2024-10-27 17:15:00
Copyright (c) 2023-2024 by Hmily, All Rights Reserved.
Update: 2025-02-06 02:28:00
Copyright (c) 2023-2025 by Hmily, All Rights Reserved.
Function: Get live stream data.
"""
import base64
@ -21,10 +21,24 @@ from .utils import trace_error_decorator
from .spider import (
get_douyu_stream_data, get_bilibili_stream_data
)
from .http_clients.async_http import get_response_status
QUALITY_MAPPING = {"OD": 0, "BD": 0, "UHD": 1, "HD": 2, "SD": 3, "LD": 4}
def get_quality_index(quality) -> tuple:
if not quality:
return list(QUALITY_MAPPING.items())[0]
quality_str = str(quality).upper()
if quality_str.isdigit():
quality_int = int(quality_str[0])
quality_str = list(QUALITY_MAPPING.keys())[quality_int]
return quality_str, QUALITY_MAPPING.get(quality_str, 0)
@trace_error_decorator
def get_douyin_stream_url(json_data: dict, video_quality: str) -> dict:
async def get_douyin_stream_url(json_data: dict, video_quality: str, proxy_addr: str) -> dict:
anchor_name = json_data.get('anchor_name')
result = {
@ -32,7 +46,7 @@ def get_douyin_stream_url(json_data: dict, video_quality: str) -> dict:
"is_live": False,
}
status = json_data.get("status", 4) # 直播状态 2 是正在直播、4 是未开播
status = json_data.get("status", 4)
if status == 2:
stream_url = json_data['stream_url']
@ -45,20 +59,27 @@ def get_douyin_stream_url(json_data: dict, video_quality: str) -> dict:
flv_url_list.append(flv_url_list[-1])
m3u8_url_list.append(m3u8_url_list[-1])
video_qualities = {"原画": 0, "蓝光": 0, "超清": 1, "高清": 2, "标清": 3, "流畅": 4}
quality_index = video_qualities.get(video_quality)
video_quality, quality_index = get_quality_index(video_quality)
m3u8_url = m3u8_url_list[quality_index]
flv_url = flv_url_list[quality_index]
result['title'] = json_data['title']
result['m3u8_url'] = m3u8_url
result['flv_url'] = flv_url
result['is_live'] = True
result['record_url'] = m3u8_url
ok = await get_response_status(url=m3u8_url, proxy_addr=proxy_addr)
if not ok:
index = quality_index + 1 if quality_index < 4 else quality_index - 1
m3u8_url = m3u8_url_list[index]
flv_url = flv_url_list[index]
result |= {
'is_live': True,
'title': json_data['title'],
'quality': video_quality,
'm3u8_url': m3u8_url,
'flv_url': flv_url,
'record_url': m3u8_url or flv_url,
}
return result
@trace_error_decorator
def get_tiktok_stream_url(json_data: dict, video_quality: str) -> dict:
async def get_tiktok_stream_url(json_data: dict, video_quality: str, proxy_addr: str) -> dict:
if not json_data:
return {"anchor_name": None, "is_live": False}
@ -66,10 +87,18 @@ def get_tiktok_stream_url(json_data: dict, video_quality: str) -> dict:
play_list = []
for key in stream:
url_info = stream[key]['main']
play_url = url_info[q_key]
sdk_params = url_info['sdk_params']
sdk_params = json.loads(sdk_params)
vbitrate = int(sdk_params['vbitrate'])
v_codec = sdk_params.get('VCodec', '')
play_url = ''
if url_info.get(q_key):
if url_info[q_key].endswith(".flv") or url_info[q_key].endswith(".m3u8"):
play_url = url_info[q_key] + '?codec=' + v_codec
else:
play_url = url_info[q_key] + '&codec=' + v_codec
resolution = sdk_params['resolution']
if vbitrate != 0 and resolution:
width, height = map(int, resolution.split('x'))
@ -99,18 +128,33 @@ def get_tiktok_stream_url(json_data: dict, video_quality: str) -> dict:
flv_url_list.append(flv_url_list[-1])
while len(m3u8_url_list) < 5:
m3u8_url_list.append(m3u8_url_list[-1])
video_qualities = {"原画": 0, "蓝光": 0, "超清": 1, "高清": 2, "标清": 3, '流畅': 4}
quality_index = video_qualities.get(video_quality)
result['title'] = live_room['liveRoom']['title']
result['flv_url'] = flv_url_list[quality_index]['url']
result['m3u8_url'] = m3u8_url_list[quality_index]['url']
result['is_live'] = True
result['record_url'] = flv_url_list[quality_index]['url'].replace("https://", "http://")
video_quality, quality_index = get_quality_index(video_quality)
flv_dict: dict = flv_url_list[quality_index]
m3u8_dict: dict = m3u8_url_list[quality_index]
check_url = m3u8_dict.get('url') or flv_dict.get('url')
ok = await get_response_status(url=check_url, proxy_addr=proxy_addr, http2=False)
if not ok:
index = quality_index + 1 if quality_index < 4 else quality_index - 1
flv_dict: dict = flv_url_list[index]
m3u8_dict: dict = m3u8_url_list[index]
flv_url = flv_dict['url']
m3u8_url = m3u8_dict['url']
result |= {
'is_live': True,
'title': live_room['liveRoom']['title'],
'quality': video_quality,
'm3u8_url': m3u8_url,
'flv_url': flv_url,
'record_url': m3u8_url or flv_url,
}
return result
@trace_error_decorator
def get_kuaishou_stream_url(json_data: dict, video_quality: str) -> dict:
async def get_kuaishou_stream_url(json_data: dict, video_quality: str) -> dict:
if json_data['type'] == 1 and not json_data["is_live"]:
return json_data
live_status = json_data['is_live']
@ -122,11 +166,10 @@ def get_kuaishou_stream_url(json_data: dict, video_quality: str) -> dict:
}
if live_status:
quality_mapping = {'原画': 0, '蓝光': 0, '超清': 1, '高清': 2, '标清': 3, '流畅': 4}
quality_mapping_bit = {'OD': 99999, 'BD': 4000, 'UHD': 2000, 'HD': 1000, 'SD': 800, 'LD': 600}
if video_quality in QUALITY_MAPPING:
if video_quality in quality_mapping:
quality_index = quality_mapping[video_quality]
quality, quality_index = get_quality_index(video_quality)
if 'm3u8_url_list' in json_data:
m3u8_url_list = json_data['m3u8_url_list'][::-1]
while len(m3u8_url_list) < 5:
@ -135,20 +178,38 @@ def get_kuaishou_stream_url(json_data: dict, video_quality: str) -> dict:
result['m3u8_url'] = m3u8_url
if 'flv_url_list' in json_data:
flv_url_list = json_data['flv_url_list'][::-1]
while len(flv_url_list) < 5:
flv_url_list.append(flv_url_list[-1])
flv_url = flv_url_list[quality_index]['url']
result['flv_url'] = flv_url
result['record_url'] = flv_url
if 'bitrate' in json_data['flv_url_list'][0]:
flv_url_list = json_data['flv_url_list']
flv_url_list = sorted(flv_url_list, key=lambda x: x['bitrate'], reverse=True)
quality_str = str(video_quality).upper()
if quality_str.isdigit():
video_quality, quality_index_bitrate_value = list(quality_mapping_bit.items())[int(quality_str)]
else:
quality_index_bitrate_value = quality_mapping_bit.get(quality_str, 99999)
video_quality = quality_str
quality_index = next(
(i for i, x in enumerate(flv_url_list) if x['bitrate'] <= quality_index_bitrate_value), None)
if quality_index is None:
quality_index = len(flv_url_list) - 1
flv_url = flv_url_list[quality_index]['url']
result['flv_url'] = flv_url
result['record_url'] = flv_url
else:
flv_url_list = json_data['flv_url_list'][::-1]
while len(flv_url_list) < 5:
flv_url_list.append(flv_url_list[-1])
flv_url = flv_url_list[quality_index]['url']
result |= {'flv_url': flv_url, 'record_url': flv_url}
result['is_live'] = True
result['quality'] = video_quality
return result
@trace_error_decorator
def get_huya_stream_url(json_data: dict, video_quality: str) -> dict:
async def get_huya_stream_url(json_data: dict, video_quality: str) -> dict:
game_live_info = json_data['data'][0]['gameLiveInfo']
live_title = game_live_info['introduction']
stream_info_list = json_data['data'][0]['gameStreamInfoList']
anchor_name = game_live_info.get('nick', '')
@ -207,17 +268,17 @@ def get_huya_stream_url(json_data: dict, video_quality: str) -> dict:
m3u8_url = f'{hls_url}/{stream_name}.{hls_url_suffix}?{new_anti_code}&ratio='
quality_list = flv_anti_code.split('&exsphd=')
if len(quality_list) > 1 and video_quality not in ["原画", "蓝光"]:
if len(quality_list) > 1 and video_quality not in ["OD", "BD"]:
pattern = r"(?<=264_)\d+"
quality_list = list(re.findall(pattern, quality_list[1]))[::-1]
while len(quality_list) < 5:
quality_list.append(quality_list[-1])
video_quality_options = {
"超清": quality_list[0],
"高清": quality_list[1],
"标清": quality_list[2],
"流畅": quality_list[3]
"UHD": quality_list[0],
"HD": quality_list[1],
"SD": quality_list[2],
"LD": quality_list[3]
}
if video_quality not in video_quality_options:
@ -227,43 +288,45 @@ def get_huya_stream_url(json_data: dict, video_quality: str) -> dict:
flv_url = flv_url + str(video_quality_options[video_quality])
m3u8_url = m3u8_url + str(video_quality_options[video_quality])
result['title'] = game_live_info['introduction']
result['flv_url'] = flv_url
result['m3u8_url'] = m3u8_url
result['is_live'] = True
result['record_url'] = flv_url
result |= {
'is_live': True,
'title': live_title,
'quality': video_quality,
'm3u8_url': m3u8_url,
'flv_url': flv_url,
'record_url': flv_url or m3u8_url
}
return result
@trace_error_decorator
def get_douyu_stream_url(json_data: dict, video_quality: str, cookies: str, proxy_addr: str) -> dict:
async def get_douyu_stream_url(json_data: dict, video_quality: str, cookies: str, proxy_addr: str) -> dict:
if not json_data["is_live"]:
return json_data
video_quality_options = {
"原画": '0',
"蓝光": '0',
"超清": '3',
"高清": '2',
"标清": '1',
"流畅": '1'
"OD": '0',
"BD": '0',
"UHD": '3',
"HD": '2',
"SD": '1',
"LD": '1'
}
rid = str(json_data["room_id"])
json_data.pop("room_id")
rate = video_quality_options.get(video_quality, '0')
flv_data = get_douyu_stream_data(rid, rate, cookies=cookies, proxy_addr=proxy_addr)
flv_data = await get_douyu_stream_data(rid, rate, cookies=cookies, proxy_addr=proxy_addr)
rtmp_url = flv_data['data'].get('rtmp_url')
rtmp_live = flv_data['data'].get('rtmp_live')
if rtmp_live:
flv_url = f'{rtmp_url}/{rtmp_live}'
json_data['flv_url'] = flv_url
json_data['record_url'] = flv_url
json_data |= {'quality': video_quality, 'flv_url': flv_url, 'record_url': flv_url}
return json_data
@trace_error_decorator
def get_yy_stream_url(json_data: dict) -> dict:
async def get_yy_stream_url(json_data: dict) -> dict:
anchor_name = json_data.get('anchor_name', '')
result = {
"anchor_name": anchor_name,
@ -273,15 +336,18 @@ def get_yy_stream_url(json_data: dict) -> dict:
stream_line_addr = json_data['avp_info_res']['stream_line_addr']
cdn_info = list(stream_line_addr.values())[0]
flv_url = cdn_info['cdn_info']['url']
result['title'] = json_data['title']
result['flv_url'] = flv_url
result['is_live'] = True
result['record_url'] = flv_url
result |= {
'is_live': True,
'title': json_data['title'],
'quality': 'OD',
'flv_url': flv_url,
'record_url': flv_url
}
return result
@trace_error_decorator
def get_bilibili_stream_url(json_data: dict, video_quality: str, proxy_addr: str, cookies: str) -> dict:
async def get_bilibili_stream_url(json_data: dict, video_quality: str, proxy_addr: str, cookies: str) -> dict:
anchor_name = json_data["anchor_name"]
if not json_data["live_status"]:
return {
@ -292,70 +358,89 @@ def get_bilibili_stream_url(json_data: dict, video_quality: str, proxy_addr: str
room_url = json_data['room_url']
video_quality_options = {
"原画": '10000',
"蓝光": '400',
"超清": '250',
"高清": '150',
"标清": '80',
"流畅": '80'
"OD": '10000',
"BD": '400',
"UHD": '250',
"HD": '150',
"SD": '80',
"LD": '80'
}
select_quality = video_quality_options[video_quality]
play_url = get_bilibili_stream_data(
play_url = await get_bilibili_stream_data(
room_url, qn=select_quality, platform='web', proxy_addr=proxy_addr, cookies=cookies)
return {
'anchor_name': json_data['anchor_name'],
'is_live': True,
'title': json_data['title'],
'quality': video_quality,
'record_url': play_url
}
@trace_error_decorator
def get_netease_stream_url(json_data: dict, video_quality: str) -> dict:
async def get_netease_stream_url(json_data: dict, video_quality: str) -> dict:
if not json_data['is_live']:
return json_data
stream_list = json_data['stream_list']['resolution']
order = ['blueray', 'ultra', 'high', 'standard']
sorted_keys = [key for key in order if key in stream_list]
while len(sorted_keys) < 5:
sorted_keys.append(sorted_keys[-1])
quality_list = {'原画': 0, '蓝光': 0, '超清': 1, '高清': 2, '标清': 3, '流畅': 4}
selected_quality = sorted_keys[quality_list[video_quality]]
flv_url_list = stream_list[selected_quality]['cdn']
selected_cdn = list(flv_url_list.keys())[0]
flv_url = flv_url_list[selected_cdn]
m3u8_url = json_data['m3u8_url']
flv_url = None
if json_data.get('stream_list'):
stream_list = json_data['stream_list']['resolution']
order = ['blueray', 'ultra', 'high', 'standard']
sorted_keys = [key for key in order if key in stream_list]
while len(sorted_keys) < 5:
sorted_keys.append(sorted_keys[-1])
video_quality, quality_index = get_quality_index(video_quality)
selected_quality = sorted_keys[quality_index]
flv_url_list = stream_list[selected_quality]['cdn']
selected_cdn = list(flv_url_list.keys())[0]
flv_url = flv_url_list[selected_cdn]
return {
"is_live": True,
"anchor_name": json_data['anchor_name'],
"title": json_data['title'],
'quality': video_quality,
"m3u8_url": m3u8_url,
"flv_url": flv_url,
"record_url": flv_url
"record_url": flv_url or m3u8_url
}
def get_stream_url(json_data: dict, video_quality: str, url_type: str = 'm3u8', spec: bool = False,
extra_key: str | int = None) -> dict:
async def get_stream_url(json_data: dict, video_quality: str, url_type: str = 'm3u8', spec: bool = False,
hls_extra_key: str | int = None, flv_extra_key: str | int = None) -> dict:
if not json_data['is_live']:
return json_data
play_url_list = json_data['play_url_list']
quality_list = {'原画': 0, '蓝光': 0, '超清': 1, '高清': 2, '标清': 3, '流畅': 4}
while len(play_url_list) < 5:
play_url_list.append(play_url_list[-1])
selected_quality = quality_list[video_quality]
video_quality, selected_quality = get_quality_index(video_quality)
data = {
"anchor_name": json_data['anchor_name'],
"is_live": True
}
if url_type == 'm3u8':
m3u8_url = play_url_list[selected_quality][extra_key] if extra_key else play_url_list[selected_quality]
data["m3u8_url"] = json_data['m3u8_url'] if spec else m3u8_url
data["record_url"] = m3u8_url
def get_url(key):
play_url = play_url_list[selected_quality]
return play_url[key] if key else play_url
if url_type == 'all':
m3u8_url = get_url(hls_extra_key)
flv_url = get_url(flv_extra_key)
data |= {
"m3u8_url": json_data['m3u8_url'] if spec else m3u8_url,
"flv_url": json_data['flv_url'] if spec else flv_url,
"record_url": m3u8_url
}
elif url_type == 'm3u8':
m3u8_url = get_url(hls_extra_key)
data |= {"m3u8_url": json_data['m3u8_url'] if spec else m3u8_url, "record_url": m3u8_url}
else:
flv = play_url_list[selected_quality][extra_key] if extra_key else play_url_list[selected_quality]
data["flv_url"] = flv
data["record_url"] = flv
flv_url = get_url(flv_extra_key)
data |= {"flv_url": flv_url, "record_url": flv_url}
data['title'] = json_data.get('title')
data['quality'] = video_quality
return data

206
src/utils.py Normal file
View File

@ -0,0 +1,206 @@
# -*- coding: utf-8 -*-
import json
import os
import random
import shutil
import string
from pathlib import Path
import functools
import hashlib
import re
import traceback
from typing import Any
from urllib.parse import parse_qs, urlparse
from collections import OrderedDict
import execjs
from .logger import logger
import configparser
# Convenience aliases for optional values used in signatures throughout this module.
OptionalStr = str | None
OptionalDict = dict | None
class Color:
    """ANSI escape sequences for terminal foreground colors."""

    RED = "\033[31m"
    GREEN = "\033[32m"
    YELLOW = "\033[33m"
    BLUE = "\033[34m"
    MAGENTA = "\033[35m"
    CYAN = "\033[36m"
    WHITE = "\033[37m"
    RESET = "\033[0m"

    @staticmethod
    def print_colored(text, color):
        """Print *text* in the given *color*, resetting the terminal style after."""
        print(color + str(text) + Color.RESET)
def trace_error_decorator(func: callable) -> callable:
    """Wrap *func* so that failures are logged instead of propagating.

    JS-engine errors get a warning about the Node.js environment; any other
    exception is logged with its type, message and line number. In both
    failure cases the wrapper returns an empty list as a fallback value.
    """

    @functools.wraps(func)
    def wrapper(*args: list, **kwargs: dict) -> Any:
        try:
            return func(*args, **kwargs)
        except execjs.ProgramError:
            logger.warning('Failed to execute JS code. Please check if the Node.js environment')
        except Exception as exc:
            line_no = traceback.extract_tb(exc.__traceback__)[-1].lineno
            logger.error(
                f"message: type: {type(exc).__name__}, {str(exc)} in function {func.__name__} at line: {line_no}"
            )
        # NOTE(review): flattened source makes the exact dedent of this return
        # ambiguous — assumed to be the wrapper's fallback path; confirm upstream.
        return []

    return wrapper
def check_md5(file_path: str | Path) -> str:
with open(file_path, 'rb') as fp:
file_md5 = hashlib.md5(fp.read()).hexdigest()
return file_md5
def dict_to_cookie_str(cookies_dict: dict) -> str:
    """Serialize a cookie mapping into a ``name=value; name2=value2`` header string."""
    return '; '.join(f"{name}={val}" for name, val in cookies_dict.items())
def read_config_value(file_path: str | Path, section: str, key: str) -> str | None:
config = configparser.ConfigParser()
try:
config.read(file_path, encoding='utf-8-sig')
except Exception as e:
print(f"Error occurred while reading the configuration file: {e}")
return None
if section in config:
if key in config[section]:
return config[section][key]
else:
print(f"Key [{key}] does not exist in section [{section}].")
else:
print(f"Section [{section}] does not exist in the file.")
return None
def update_config(file_path: str | Path, section: str, key: str, new_value: str) -> None:
config = configparser.ConfigParser()
try:
config.read(file_path, encoding='utf-8-sig')
except Exception as e:
print(f"An error occurred while reading the configuration file: {e}")
return
if section not in config:
print(f"Section [{section}] does not exist in the file.")
return
# 转义%字符
escaped_value = new_value.replace('%', '%%')
config[section][key] = escaped_value
try:
with open(file_path, 'w', encoding='utf-8-sig') as configfile:
config.write(configfile)
print(f"The value of {key} under [{section}] in the configuration file has been updated.")
except Exception as e:
print(f"Error occurred while writing to the configuration file: {e}")
def get_file_paths(directory: str) -> list:
    """Recursively collect the full paths of every file under *directory*."""
    return [
        os.path.join(folder, name)
        for folder, _subdirs, names in os.walk(directory)
        for name in names
    ]
def remove_emojis(text: str, replace_text: str = '') -> str:
    """Substitute every run of emoji characters in *text* with *replace_text*."""
    # One character class covering the common emoji Unicode blocks; a trailing
    # '+' collapses consecutive emojis into a single replacement.
    emoji_class = (
        "["
        "\U0001F1E0-\U0001F1FF"  # flags (iOS)
        "\U0001F300-\U0001F5FF"  # symbols & pictographs
        "\U0001F600-\U0001F64F"  # emoticons
        "\U0001F680-\U0001F6FF"  # transport & map symbols
        "\U0001F700-\U0001F77F"  # alchemical symbols
        "\U0001F780-\U0001F7FF"  # Geometric Shapes Extended
        "\U0001F800-\U0001F8FF"  # Supplemental Arrows-C
        "\U0001F900-\U0001F9FF"  # Supplemental Symbols and Pictographs
        "\U0001FA00-\U0001FA6F"  # Chess Symbols
        "\U0001FA70-\U0001FAFF"  # Symbols and Pictographs Extended-A
        "\U00002702-\U000027B0"  # Dingbats
        "]+"
    )
    return re.compile(emoji_class, flags=re.UNICODE).sub(replace_text, text)
def remove_duplicate_lines(file_path: str | Path) -> None:
unique_lines = OrderedDict()
text_encoding = 'utf-8-sig'
with open(file_path, 'r', encoding=text_encoding) as input_file:
for line in input_file:
unique_lines[line.strip()] = None
with open(file_path, 'w', encoding=text_encoding) as output_file:
for line in unique_lines:
output_file.write(line + '\n')
def check_disk_capacity(file_path: str | Path, show: bool = False) -> float:
absolute_path = os.path.abspath(file_path)
directory = os.path.dirname(absolute_path)
disk_usage = shutil.disk_usage(directory)
disk_root = Path(directory).anchor
free_space_gb = disk_usage.free / (1024 ** 3)
if show:
print(f"{disk_root} Total: {disk_usage.total / (1024 ** 3):.2f} GB "
f"Used: {disk_usage.used / (1024 ** 3):.2f} GB "
f"Free: {free_space_gb:.2f} GB\n")
return free_space_gb
def handle_proxy_addr(proxy_addr):
    """Normalize a proxy address: prefix 'http://' when no scheme, None when empty."""
    if not proxy_addr:
        return None
    return proxy_addr if proxy_addr.startswith('http') else 'http://' + proxy_addr
def generate_random_string(length: int) -> str:
    """Return *length* characters drawn uniformly from uppercase letters and digits."""
    alphabet = string.ascii_uppercase + string.digits
    return ''.join(random.choice(alphabet) for _ in range(length))
def jsonp_to_json(jsonp_str: str) -> OptionalDict:
    """Strip the JSONP callback wrapper and parse the enclosed JSON payload.

    Raises:
        Exception: when *jsonp_str* does not match ``callback(...)``.
    """
    found = re.search(r'(\w+)\((.*)\);?$', jsonp_str)
    if not found:
        raise Exception("No JSON data found in JSONP response.")
    return json.loads(found.group(2))
def replace_url(file_path: str | Path, old: str, new: str) -> None:
with open(file_path, 'r', encoding='utf-8-sig') as f:
content = f.read()
if old in content:
with open(file_path, 'w', encoding='utf-8-sig') as f:
f.write(content.replace(old, new))
def get_query_params(url: str, param_name: OptionalStr) -> dict | list[str]:
    """Extract query parameters from *url*.

    Returns the list of values for *param_name* ([] when absent), or the full
    parameter dict when *param_name* is None.
    """
    params = parse_qs(urlparse(url).query)
    if param_name is None:
        return params
    return params.get(param_name, [])