Add Kuaishou following-only recording (fixes recording errors and the need for frequent manual cookie replacement) #875

Open · wants to merge 2 commits into main
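
This PR adds an optional "following-only" mode for Kuaishou recording. When 快手仅关注录制(是/否) is set to 是 in config.ini, live status is fetched from the logged-in account's following list (live_api/follow/living) instead of each room page, and the login tokens are refreshed automatically at most every 10 minutes via live_api/baseuser/userLogin, so cookies no longer need to be replaced by hand. To support this, get_req is refactored to delegate to a new get_req_with_headers helper that also returns the response headers (needed to read Set-Cookie).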
1 change: 1 addition & 0 deletions config/config.ini
@@ -27,6 +27,7 @@ language(zh_cn/en) = zh_cn
自定义脚本执行命令 =
使用代理录制的平台(逗号分隔) = tiktok, sooplive, pandalive, winktv, flextv, popkontv, twitch, liveme, showroom, chzzk, shopee, shp, youtu
额外使用代理录制的平台(逗号分隔) =
快手仅关注录制(是/否) = 是

[推送配置]
# 可选微信|钉钉|tg|邮箱|bark|ntfy 可填多个
120 changes: 116 additions & 4 deletions douyinliverecorder/spider.py
@@ -17,7 +17,7 @@
import urllib.parse
import urllib.error
from urllib.request import Request
from typing import List
from typing import List, MutableMapping
import requests
import ssl
import re
@@ -40,6 +40,7 @@
ssl_context.verify_mode = ssl.CERT_NONE
OptionalStr = str | None
OptionalDict = dict | None
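# module-level cache shared by every Kuaishou task: holds the most recently
# refreshed cookie string ('kuaishou_cookies') and the last token-refresh time
# ('kuaishou_updated', epoch seconds)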
kuaishou_cookies_cache = {}


def get_req(
@@ -53,6 +54,31 @@ def get_req(
        content_conding: str = 'utf-8',
        redirect_url: bool = False,
) -> str:
    response, _ = get_req_with_headers(
        url=url,
        proxy_addr=proxy_addr,
        headers=headers,
        data=data,
        json_data=json_data,
        timeout=timeout,
        abroad=abroad,
        content_conding=content_conding,
        redirect_url=redirect_url
    )
    return response
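
# Note: get_req keeps its original str-only return; callers that also need the
# response headers (e.g. to read Set-Cookie after a login call) use
# get_req_with_headers, which returns a (body, headers) tuple.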


def get_req_with_headers(
        url: str,
        proxy_addr: OptionalStr = None,
        headers: OptionalDict = None,
        data: dict | bytes | None = None,
        json_data: dict | list | None = None,
        timeout: int = 20,
        abroad: bool = False,
        content_conding: str = 'utf-8',
        redirect_url: bool = False,
) -> tuple[str, MutableMapping[str, str]]:
    if headers is None:
        headers = {}
    try:
@@ -70,6 +96,7 @@ def get_req(
            if redirect_url:
                return response.url
            resp_str = response.text
            resp_headers = response.headers
        else:
            if data and not isinstance(data, bytes):
                data = urllib.parse.urlencode(data).encode(content_conding)
@@ -90,14 +117,17 @@
                if content_encoding == 'gzip':
                    with gzip.open(response, 'rt', encoding=content_conding) as gzipped:
                        resp_str = gzipped.read()
                    resp_headers = response.headers
                else:
                    resp_str = response.read().decode(content_conding)
                    resp_headers = response.headers
            finally:
                response.close()

    except urllib.error.HTTPError as e:
        if e.code == 400:
            resp_str = e.read().decode(content_conding)
            resp_headers = e.headers
        else:
            raise
    except urllib.error.URLError as e:
@@ -109,8 +139,9 @@

    except Exception as e:
        resp_str = str(e)
        resp_headers = {}

    return resp_str
    return resp_str, resp_headers


def get_response_status(url: str, proxy_addr: OptionalStr = None, headers: OptionalDict = None, timeout: int = 10,
@@ -377,6 +408,88 @@ def get_tiktok_stream_data(url: str, proxy_addr: OptionalStr = None, cookies: Op
    return json_data


@trace_error_decorator
def get_kuaishou_stream_data_from_following(url: str, proxy_addr: OptionalStr = None,
                                            cookies: OptionalStr = None) -> dict:
    if cookies is None or cookies.strip() == '':
        # cookies are mandatory for following-list monitoring; the API rejects anonymous requests
        raise ValueError("Kuaishou cookies must be set when following-only recording is enabled.")

    headers = {
        'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/131.0.0.0 Safari/537.36 Edg/131.0.0.0',
        'Accept-Language': 'zh-CN,zh;q=0.8,zh-TW;q=0.7,zh-HK;q=0.5,en-US;q=0.3,en;q=0.2',
    }
    if 'kuaishou_cookies' in kuaishou_cookies_cache:
        cookies = kuaishou_cookies_cache['kuaishou_cookies']

    cookies_dict = dict(i.split('=', 1) for i in cookies.split('; ') if '=' in i)
    # anti-block: refresh the login tokens at most once every 10 minutes
    if 'kuaishou_updated' not in kuaishou_cookies_cache \
            or kuaishou_cookies_cache['kuaishou_updated'] < time.time() - 60 * 10:
        kuaishou_cookies_cache['kuaishou_updated'] = time.time()

        # POST https://live.kuaishou.com/live_api/baseuser/userLogin with payload
        # {"userLoginInfo": {"authToken": <kuaishou.live.web_st from the cookies>, "sid": "kuaishou.live.web"}},
        # then merge the Set-Cookie headers of the response back into the cookie string
        user_login_url = 'https://live.kuaishou.com/live_api/baseuser/userLogin'
        web_st = cookies_dict.get('kuaishou.live.web_st', '')
        user_login_data = {"userLoginInfo": {
            "authToken": web_st,
            "sid": "kuaishou.live.web"}}
        headers_login = headers.copy() | {'content-type': 'application/json'}
        login_response, response_cookie = get_req_with_headers(url=user_login_url, headers=headers_login,
                                                               json_data=user_login_data)
        if login_response and response_cookie:
            # each relevant header line looks like
            # 'Set-Cookie: clientid=3; path=/; expires=Tue, 06 Jan 2026 14:25:12 GMT; domain=kuaishou.com; httponly'
            for i in str(response_cookie).split('\n'):
                if i.startswith('Set-Cookie'):
                    # the second token is e.g. 'clientid=3;' -> key 'clientid', value '3'
                    key, _, value = i.split(' ')[1].partition('=')
                    cookies_dict[key] = value.rstrip(';')
            cookies = '; '.join(f"{k}={v}" for k, v in cookies_dict.items())
            kuaishou_cookies_cache['kuaishou_cookies'] = cookies
        else:
            print("Failed to refresh Kuaishou cookies: empty login response.")

    if cookies:
        headers['Cookie'] = cookies
    try:
        html_str = get_req(url="https://live.kuaishou.com/live_api/follow/living", proxy_addr=proxy_addr,
                           headers=headers)
    except Exception as e:
        print(f"Failed to fetch data from {url}: {e}")
        return {"type": 1, "is_live": False}

    try:
        play_list = json.loads(html_str)['data']['list']
        latest_path_or_url = url.split('/')[-1]
        # find the entry whose author id equals the last path segment of the room URL
        play_list_item = None
        for i in play_list:
            if i['author']['id'].lower() == latest_path_or_url.lower():
                play_list_item = i
                break

    except (AttributeError, KeyError, IndexError, json.JSONDecodeError) as e:
        print(f"Failed to parse the JSON response. Error: {e}")
        return {"type": 1, "is_live": False}

    if not play_list_item:
        # print(f"No live data for {url}")
        return {"type": 2, "is_live": False}
    play_url_list = play_list_item['playUrls'][0]['adaptationSet']['representation']
    return {
        "type": 2,
        "anchor_name": play_list_item['author']['name'],
        "flv_url_list": play_url_list,
        "is_live": True
    }
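
# A minimal sketch of the expected call (room id and cookie values here are
# hypothetical placeholders, not taken from this PR):
#   data = get_kuaishou_stream_data_from_following(
#       url='https://live.kuaishou.com/u/some_anchor_id',   # last path segment is
#       cookies='kuaishou.live.web_st=TOKEN; did=web_x')    # matched to author['id']
#   # -> {"type": 2, "anchor_name": ..., "flv_url_list": [...], "is_live": True}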


@trace_error_decorator
def get_kuaishou_stream_data(url: str, proxy_addr: OptionalStr = None, cookies: OptionalStr = None) -> dict:
    headers = {
@@ -561,7 +674,6 @@ def md5(data) -> str:


def get_token_js(rid: str, did: str, proxy_addr: OptionalStr = None) -> List[str]:

    url = f'https://www.douyu.com/{rid}'
    html_str = get_req(url=url, proxy_addr=proxy_addr)
    result = re.search(r'(vdwdae325w_64we[\s\S]*function ub98484234[\s\S]*?)function', html_str).group(1)
@@ -1080,7 +1192,7 @@ def get_sooplive_stream_data(
    else:
        anchor_name = ''

    result = {"anchor_name": anchor_name or '' ,"is_live": False}
    result = {"anchor_name": anchor_name or '', "is_live": False}

    def get_url_list(m3u8: str) -> List[str]:
        resp = get_req(url=m3u8, proxy_addr=proxy_addr, headers=headers, abroad=True)
16 changes: 12 additions & 4 deletions main.py
@@ -501,10 +501,16 @@ def start_record(url_data: tuple, count_variable: int = -1) -> None:
        elif record_url.find("https://live.kuaishou.com/") > -1:
            platform = '快手直播'
            with semaphore:
                json_data = spider.get_kuaishou_stream_data(
                    url=record_url,
                    proxy_addr=proxy_address,
                    cookies=ks_cookie)
                if not kuaishou_rec_following_only:
                    json_data = spider.get_kuaishou_stream_data(
                        url=record_url,
                        proxy_addr=proxy_address,
                        cookies=ks_cookie)
                else:
                    json_data = spider.get_kuaishou_stream_data_from_following(
                        url=record_url,
                        proxy_addr=proxy_address,
                        cookies=ks_cookie)
                port_info = stream.get_kuaishou_stream_url(json_data, record_quality)

        elif record_url.find("https://www.huya.com/") > -1:
@@ -1617,6 +1623,8 @@ def read_config_value(config_parser: configparser.RawConfigParser, section: str,
enable_proxy_platform_list = enable_proxy_platform.replace(',', ',').split(',') if enable_proxy_platform else None
extra_enable_proxy = read_config_value(config, '录制设置', '额外使用代理录制的平台(逗号分隔)', '')
extra_enable_proxy_platform_list = extra_enable_proxy.replace(',', ',').split(',') if extra_enable_proxy else None
kuaishou_rec_following_only = options.get(
    read_config_value(config, '录制设置', '快手仅关注录制(是/否)', "否"), False)
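# 'options' is assumed (from its use here) to map the config strings "是"/"否"
# to True/False; any other value falls back to False, i.e. the original
# per-room Kuaishou code path.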
live_status_push = read_config_value(config, '推送配置', '直播状态推送渠道', "")
dingtalk_api_url = read_config_value(config, '推送配置', '钉钉推送接口链接', "")
xizhi_api_url = read_config_value(config, '推送配置', '微信推送接口链接', "")