Add files via upload

parent 2e39ec7b9b
commit 9959e8b691

PY/分享咖啡APP.py (new file, 208 lines)
@@ -0,0 +1,208 @@
import re
import sys
from Crypto.Hash import MD5

sys.path.append("..")
from Crypto.Cipher import AES
from Crypto.Util.Padding import pad, unpad
from urllib.parse import quote, urlparse
from base64 import b64encode, b64decode
import json
import time
from base.spider import Spider


# Spider for the "咖啡" app API: every response carries an AES-encrypted "data"
# envelope that getdata()/aes() below unwrap.
class Spider(Spider):

    def init(self, extend=""):
        self.host = self.gethost()
        pass

    def isVideoFormat(self, url):
        pass

    def manualVideoCheck(self):
        pass

    def action(self, action):
        pass

    def destroy(self):
        pass

    def homeContent(self, filter):
        data = self.getdata("/api.php/qijiappapi.index/initV120")
        dy = {"class": "类型", "area": "地区", "lang": "语言", "year": "年份", "letter": "字母", "by": "排序",
              "sort": "排序"}
        filters = {}
        classes = []
        json_data = data["type_list"]
        homedata = data["banner_list"][8:]
        for item in json_data:
            if item["type_name"] == "全部":
                continue
            has_non_empty_field = False
            jsontype_extend = json.loads(item["type_extend"])
            homedata.extend(item["recommend_list"])
            jsontype_extend["sort"] = "最新,最热,最赞"
            classes.append({"type_name": item["type_name"], "type_id": item["type_id"]})
            for key in dy:
                if key in jsontype_extend and jsontype_extend[key].strip() != "":
                    has_non_empty_field = True
                    break
            if has_non_empty_field:
                filters[str(item["type_id"])] = []
                for dkey in jsontype_extend:
                    if dkey in dy and jsontype_extend[dkey].strip() != "":
                        values = jsontype_extend[dkey].split(",")
                        value_array = [{"n": value.strip(), "v": value.strip()} for value in values if
                                       value.strip() != ""]
                        filters[str(item["type_id"])].append({"key": dkey, "name": dy[dkey], "value": value_array})
        result = {}
        result["class"] = classes
        result["filters"] = filters
        result["list"] = homedata[1:]
        return result

    def homeVideoContent(self):
        pass

    def categoryContent(self, tid, pg, filter, extend):
        body = {"area": extend.get('area', '全部'), "year": extend.get('year', '全部'), "type_id": tid, "page": pg,
                "sort": extend.get('sort', '最新'), "lang": extend.get('lang', '全部'),
                "class": extend.get('class', '全部')}
        result = {}
        data = self.getdata("/api.php/qijiappapi.index/typeFilterVodList", body)
        result["list"] = data["recommend_list"]
        result["page"] = pg
        result["pagecount"] = 9999
        result["limit"] = 90
        result["total"] = 999999
        return result

    def detailContent(self, ids):
        body = f"vod_id={ids[0]}"
        data = self.getdata("/api.php/qijiappapi.index/vodDetail2", body)
        vod = data["vod"]
        play = []
        names = []
        # each episode is packed as "name$<base64 json>"; episodes are joined by "#",
        # play sources by "$$$" (playerContent reverses this)
        for itt in data["vod_play_list"]:
            a = []
            names.append(itt["player_info"]["show"])
            for it in itt['urls']:
                it['user_agent'] = itt["player_info"].get("user_agent")
                it["parse"] = itt["player_info"].get("parse")
                a.append(f"{it['name']}${self.e64(json.dumps(it))}")
            play.append("#".join(a))
        vod["vod_play_from"] = "$$$".join(names)
        vod["vod_play_url"] = "$$$".join(play)
        result = {"list": [vod]}
        return result

    def searchContent(self, key, quick, pg="1"):
        body = f"keywords={key}&type_id=0&page={pg}"
        data = self.getdata("/api.php/qijiappapi.index/searchList", body)
        result = {"list": data["search_list"], "page": pg}
        return result

    def playerContent(self, flag, id, vipFlags):
        ids = json.loads(self.d64(id))
        h = {"User-Agent": (ids['user_agent'] or "okhttp/3.14.9")}
        try:
            # resolve the real play url via the parse api; on failure fall back to the raw url (parse=1)
            if re.search(r'url=', ids['parse_api_url']):
                data = self.fetch(ids['parse_api_url'], headers=h, timeout=10).json()
                url = data.get('url') or data['data'].get('url')
            else:
                body = f"parse_api={ids.get('parse') or ids['parse_api_url'].replace(ids['url'], '')}&url={quote(self.aes(ids['url'], True))}&token={ids.get('token')}"
                b = self.getdata("/api.php/qijiappapi.index/vodParse", body)['json']
                url = json.loads(b)['url']
            if 'error' in url: raise ValueError(f"解析失败: {url}")
            p = 0
        except Exception as e:
            print('错误信息:', e)
            url, p = ids['url'], 1

        if re.search(r'\.jpg|\.png|\.jpeg', url):
            url = self.Mproxy(url)
        result = {}
        result["parse"] = p
        result["url"] = url
        result["header"] = h
        return result

    def localProxy(self, param):
        return self.Mlocal(param)

    def gethost(self):
        # the current API host is published as a plain-text file
        headers = {
            'User-Agent': 'okhttp/3.14.9'
        }
        response = self.fetch('https://6652.kstore.space/host/%E5%92%96%E5%95%A1.txt', headers=headers).text
        return response.strip()

    def aes(self, text, b=None):
        # AES-128-CBC with the key reused as the IV; b=True encrypts, otherwise decrypts
        key = b"qwertyuiopqwerty"
        cipher = AES.new(key, AES.MODE_CBC, key)
        if b:
            ct_bytes = cipher.encrypt(pad(text.encode("utf-8"), AES.block_size))
            ct = b64encode(ct_bytes).decode("utf-8")
            return ct
        else:
            pt = unpad(cipher.decrypt(b64decode(text)), AES.block_size)
            return pt.decode("utf-8")

    def header(self):
        t = str(int(time.time()))
        header = {"Referer": self.host,
                  "User-Agent": "okhttp/3.10.0",
                  "content-lenth": "202",
                  "content-type": "application/x-www-form-urlencoded"}
        return header

    def getdata(self, path, data=None):
        # POST to the API and decrypt the base64/AES "data" envelope into JSON
        vdata = self.post(f"{self.host}{path}", headers=self.header(), data=data, timeout=10).json()['data']
        data1 = self.aes(vdata)
        return json.loads(data1)

    def Mproxy(self, url):
        return f"{self.getProxyUrl()}&url={self.e64(url)}&type=m3u8"

    def Mlocal(self, param, header=None):
        # follow one redirect, then rebase relative playlist entries onto the final host
        url = self.d64(param["url"])
        ydata = self.fetch(url, headers=header, allow_redirects=False)
        data = ydata.content.decode('utf-8')
        if ydata.headers.get('Location'):
            url = ydata.headers['Location']
            data = self.fetch(url, headers=header).content.decode('utf-8')
        parsed_url = urlparse(url)
        durl = parsed_url.scheme + "://" + parsed_url.netloc
        lines = data.strip().split('\n')
        for index, string in enumerate(lines):
            if '#EXT' not in string and 'http' not in string:
                last_slash_index = string.rfind('/')
                lpath = string[:last_slash_index + 1]
                lines[index] = durl + ('' if lpath.startswith('/') else '/') + lpath
        data = '\n'.join(lines)
        return [200, "application/vnd.apple.mpegurl", data]

    def e64(self, text):
        try:
            text_bytes = text.encode('utf-8')
            encoded_bytes = b64encode(text_bytes)
            return encoded_bytes.decode('utf-8')
        except Exception as e:
            print(f"Base64编码错误: {str(e)}")
            return ""

    def d64(self, encoded_text):
        try:
            encoded_bytes = encoded_text.encode('utf-8')
            decoded_bytes = b64decode(encoded_bytes)
            return decoded_bytes.decode('utf-8')
        except Exception as e:
            print(f"Base64解码错误: {str(e)}")
            return ""

    def md5(self, text):
        h = MD5.new()
        h.update(text.encode('utf-8'))
        return h.hexdigest()
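A side note on the play-url format used above: detailContent packs every episode as "name$&lt;base64 json&gt;", joins episodes with "#" and play sources with "$$$", and playerContent later base64-decodes the JSON again. A minimal standalone sketch of that round trip follows; the sample entry is invented, only the encoding scheme matches the spider.

# Standalone sketch of the detailContent/playerContent packing (made-up sample values).
import json
from base64 import b64encode, b64decode

entry = {"name": "第01集", "url": "https://example.com/ep01.m3u8",
         "user_agent": "okhttp/3.14.9", "parse": ""}
packed = f"{entry['name']}${b64encode(json.dumps(entry).encode('utf-8')).decode('utf-8')}"

name, payload = packed.split("$", 1)                      # playerContent splits on the first "$"
restored = json.loads(b64decode(payload).decode('utf-8'))
assert restored["url"] == entry["url"]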
PY/分享小天4k.py (new file, 298 lines)
@@ -0,0 +1,298 @@
# coding=utf-8
#!/usr/bin/python

from Crypto.Util.Padding import unpad
from Crypto.Util.Padding import pad
from urllib.parse import unquote
from Crypto.Cipher import ARC4
from urllib.parse import quote
from base.spider import Spider
from Crypto.Cipher import AES
from datetime import datetime
from bs4 import BeautifulSoup
from base64 import b64decode
import urllib.request
import urllib.parse
import datetime
import binascii
import requests
import base64
import json
import time
import sys
import re
import os

sys.path.append('..')

xurl = "https://qjappcms.sun4k.top"

headerx = {
    'User-Agent': 'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/50.0.2661.87 Safari/537.36'
}

pm = ''


class Spider(Spider):
    global xurl
    # global xurl1
    global headerx

    # global headers

    def getName(self):
        return "首页"

    def init(self, extend):
        pass

    def isVideoFormat(self, url):
        pass

    def manualVideoCheck(self):
        pass

    def homeContent(self, filter):
        result = {}
        result = {"class": [
            {"type_id": "37", "type_name": "4K区"},
            {"type_id": "42", "type_name": "HD区"},
            {"type_id": "1", "type_name": "电影"},
            {"type_id": "2", "type_name": "连续剧"},
            {"type_id": "3", "type_name": "综艺"},
            {"type_id": "4", "type_name": "动漫"},
            {"type_id": "41", "type_name": "直播区"}],
        }

        return result

    def decrypt(self, encrypted_data_b64):
        # AES-128-CBC decrypt of the base64 "data" envelope (key reused as IV)
        key_text = "sBxqXVF5pAHbGzrH"
        iv_text = "sBxqXVF5pAHbGzrH"
        key_bytes = key_text.encode('utf-8')
        iv_bytes = iv_text.encode('utf-8')
        encrypted_data = base64.b64decode(encrypted_data_b64)
        cipher = AES.new(key_bytes, AES.MODE_CBC, iv_bytes)
        decrypted_padded = cipher.decrypt(encrypted_data)
        decrypted = unpad(decrypted_padded, AES.block_size)
        return decrypted.decode('utf-8')

    def decrypt_wb(self, sencrypted_data):
        # despite the name, this encrypts: AES-CBC + base64, used to wrap the url sent to vodParse
        key_text = "sBxqXVF5pAHbGzrH"
        iv_text = "sBxqXVF5pAHbGzrH"
        key_bytes = key_text.encode('utf-8')
        iv_bytes = iv_text.encode('utf-8')
        data_bytes = sencrypted_data.encode('utf-8')
        padded_data = pad(data_bytes, AES.block_size)
        cipher = AES.new(key_bytes, AES.MODE_CBC, iv_bytes)
        encrypted_bytes = cipher.encrypt(padded_data)
        encrypted_data_b64 = base64.b64encode(encrypted_bytes).decode('utf-8')
        return encrypted_data_b64

    def homeVideoContent(self):
        result = {}
        videos = []
        url = f"{xurl}/api.php/getappapi.index/initV119"
        res = requests.get(url=url, headers=headerx).text
        res = json.loads(res)
        encrypted_data = res['data']
        kjson = self.decrypt(encrypted_data)
        kjson1 = json.loads(kjson)
        for i in kjson1['type_list']:
            for item in i['recommend_list']:
                id = item['vod_id']
                name = item['vod_name']
                pic = item['vod_pic']
                remarks = item['vod_remarks']
                video = {
                    "vod_id": id,
                    "vod_name": name,
                    "vod_pic": pic,
                    "vod_remarks": remarks
                }
                videos.append(video)

        result = {'list': videos}
        return result

    def categoryContent(self, cid, pg, filter, ext):
        result = {}
        videos = []
        payload = {
            'area': "全部",
            'year': "全部",
            'type_id': cid,
            'page': str(pg),
            'sort': "最新",
            'lang': "全部",
            'class': "全部"
        }
        url = f'{xurl}/api.php/getappapi.index/typeFilterVodList'
        res = requests.post(url=url, headers=headerx, data=payload).text
        # res1 = res.text
        res = json.loads(res)
        encrypted_data = res['data']
        kjson = self.decrypt(encrypted_data)
        kjson1 = json.loads(kjson)
        # print(kjson1)
        for i in kjson1['recommend_list']:
            id = i['vod_id']
            name = i['vod_name']
            pic = i['vod_pic']
            remarks = i['vod_remarks']

            video = {
                "vod_id": id,
                "vod_name": name,
                "vod_pic": pic,
                "vod_remarks": remarks
            }
            videos.append(video)
        result = {'list': videos}
        result['page'] = pg
        result['pagecount'] = 9999
        result['limit'] = 90
        result['total'] = 999999
        return result

    def detailContent(self, ids):
        did = ids[0]
        result = {}
        videos = []
        play_form = ''
        play_url = ''
        payload = {
            'vod_id': did
        }
        url = f'{xurl}/api.php/getappapi.index/vodDetail2'
        res = requests.post(url=url, headers=headerx, data=payload).text
        res = json.loads(res)
        encrypted_data = res['data']
        kjson = self.decrypt(encrypted_data)
        # print(kjson)
        kjson1 = json.loads(kjson)
        actor = kjson1['vod']['vod_actor']
        director = kjson1['vod'].get('vod_director', '')
        area = kjson1['vod']['vod_area']
        name = kjson1['vod']['vod_name']
        year = kjson1['vod']['vod_year']
        content = kjson1['vod']['vod_content']
        subtitle = kjson1['vod']['vod_remarks']
        desc = kjson1['vod']['vod_lang']
        remark = '时间:' + subtitle + ' 语言:' + desc
        for line in kjson1['vod_play_list']:
            if line['player_info']['show'] == '自建线路':
                continue
            play_form += line['player_info']['show'] + '$$$'
            kurls = ""
            for vod in line['urls']:
                # assumption: platform links (qq/iqiyi/mgtv/bilibili/youku) go through the
                # server-side parser, plain m3u8 links are kept as direct play urls
                if any(x in vod['url'] for x in ('qq', 'iqiyi', 'mgtv', 'bilibili', 'youku')):
                    kurls += str(vod['name']) + '$' + vod['parse_api_url'] + '@' + vod['token'] + '#'
                elif 'm3u8' in vod['url']:
                    kurls += str(vod['name']) + '$' + vod['url'] + '#'
            print(kurls)
            kurls = kurls.rstrip('#')
            play_url += kurls + '$$$'
        play_form = play_form.rstrip('$$$')
        play_url = play_url.rstrip('$$$')

        videos.append({
            "vod_id": did,
            "vod_name": name,
            "vod_actor": actor.replace('演员', ''),
            "vod_director": director.replace('导演', ''),
            "vod_content": content,
            "vod_remarks": remark,
            "vod_year": year + '年',
            "vod_area": area,
            "vod_play_from": play_form.replace('(若黑屏请到HD区)', ' '),
            "vod_play_url": play_url
        })

        result['list'] = videos

        return result

    def playerContent(self, flag, id, vipFlags):
        url = ''
        if '.m3u8' in id:
            url = id.replace('@', '')
        else:
            # assumption: platform links are resolved through the app's vodParse endpoint
            if any(x in id for x in ('qq', 'iqiyi', 'mgtv', 'bilibili', 'youku')):
                aid = id.split('http')[0]
                uid = id.split('http')[-1]
                kurl = 'http' + uid.split('@')[0]
                id1 = self.decrypt_wb(kurl)
                payload = {
                    "parse_api": aid,
                    "url": id1,
                    "token": uid.split('@')[-1]
                }
                url1 = f"{xurl}/api.php/getappapi.index/vodParse"
                response = requests.post(url=url1, headers=headerx, data=payload)
                if response.status_code == 200:
                    response_data = response.json()
                    # print(response_data)
                    encrypted_data = response_data['data']
                    kjson = self.decrypt(encrypted_data)
                    kjson1 = json.loads(kjson)
                    kjson2 = kjson1['json']
                    kjson3 = json.loads(kjson2)
                    url = kjson3['url']
        result = {}
        result["parse"] = 0
        result["playUrl"] = ''
        result["url"] = url
        result["header"] = headerx
        return result

    def searchContentPage(self, key, quick, pg):
        result = {}
        videos = []
        payload = {
            'keywords': key,
            'type_id': "0",
            'page': str(pg)
        }
        url = f'{xurl}/api.php/getappapi.index/searchList'
        response = requests.post(url=url, data=payload, headers=headerx).text
        res = json.loads(response)
        encrypted_data = res['data']
        kjson = self.decrypt(encrypted_data)
        kjson1 = json.loads(kjson)
        for i in kjson1['search_list']:
            id = i['vod_id']
            name = i['vod_name']
            pic = i['vod_pic']
            remarks = i['vod_year'] + ' ' + i['vod_class']

            video = {
                "vod_id": id,
                "vod_name": name,
                "vod_pic": pic,
                "vod_remarks": remarks
            }
            videos.append(video)
        result = {'list': videos}
        result['page'] = pg
        result['pagecount'] = 9999
        result['limit'] = 90
        result['total'] = 999999
        return result

    def searchContent(self, key, quick, pg="1"):
        return self.searchContentPage(key, quick, '1')

    def localProxy(self, params):
        if params['type'] == "m3u8":
            return self.proxyM3u8(params)
        elif params['type'] == "media":
            return self.proxyMedia(params)
        elif params['type'] == "ts":
            return self.proxyTs(params)
        return None
PY/分享忍者APP.py (new file, 209 lines)
@@ -0,0 +1,209 @@
import re
import sys
from Crypto.Hash import MD5

sys.path.append("..")
from Crypto.Cipher import AES
from Crypto.Util.Padding import pad, unpad
from urllib.parse import quote, urlparse
from base64 import b64encode, b64decode
import json
import time
from base.spider import Spider


class Spider(Spider):

    def init(self, extend=""):
        self.host = self.gethost()
        pass

    def isVideoFormat(self, url):
        pass

    def manualVideoCheck(self):
        pass

    def action(self, action):
        pass

    def destroy(self):
        pass

    def homeContent(self, filter):
        data = self.getdata("/api.php/getappapi.index/initV119")
        dy = {"class": "类型", "area": "地区", "lang": "语言", "year": "年份", "letter": "字母", "by": "排序",
              "sort": "排序"}
        filters = {}
        classes = []
        json_data = data["type_list"]
        homedata = data["banner_list"][8:]
        for item in json_data:
            if item["type_name"] == "全部":
                continue
            has_non_empty_field = False
            jsontype_extend = json.loads(item["type_extend"])
            homedata.extend(item["recommend_list"])
            jsontype_extend["sort"] = "最新,最热,最赞"
            classes.append({"type_name": item["type_name"], "type_id": item["type_id"]})
            for key in dy:
                if key in jsontype_extend and jsontype_extend[key].strip() != "":
                    has_non_empty_field = True
                    break
            if has_non_empty_field:
                filters[str(item["type_id"])] = []
                for dkey in jsontype_extend:
                    if dkey in dy and jsontype_extend[dkey].strip() != "":
                        values = jsontype_extend[dkey].split(",")
                        value_array = [{"n": value.strip(), "v": value.strip()} for value in values if
                                       value.strip() != ""]
                        filters[str(item["type_id"])].append({"key": dkey, "name": dy[dkey], "value": value_array})
        result = {}
        result["class"] = classes
        result["filters"] = filters
        result["list"] = homedata[1:]
        return result

    def homeVideoContent(self):
        pass

    def categoryContent(self, tid, pg, filter, extend):
        body = {"area": extend.get('area', '全部'), "year": extend.get('year', '全部'), "type_id": tid, "page": pg,
                "sort": extend.get('sort', '最新'), "lang": extend.get('lang', '全部'),
                "class": extend.get('class', '全部')}
        result = {}
        data = self.getdata("/api.php/getappapi.index/typeFilterVodList", body)
        result["list"] = data["recommend_list"]
        result["page"] = pg
        result["pagecount"] = 9999
        result["limit"] = 90
        result["total"] = 999999
        return result

    def detailContent(self, ids):
        body = f"vod_id={ids[0]}"
        data = self.getdata("/api.php/getappapi.index/vodDetail", body)
        vod = data["vod"]
        play = []
        names = []
        for itt in data["vod_play_list"]:
            a = []
            names.append(itt["player_info"]["show"])
            for it in itt['urls']:
                it['user_agent'] = itt["player_info"].get("user_agent")
                it["parse"] = itt["player_info"].get("parse")
                a.append(f"{it['name']}${self.e64(json.dumps(it))}")
            play.append("#".join(a))
        vod["vod_play_from"] = "$$$".join(names)
        vod["vod_play_url"] = "$$$".join(play)
        result = {"list": [vod]}
        return result

    def searchContent(self, key, quick, pg="1"):
        body = f"keywords={key}&type_id=0&page={pg}"
        data = self.getdata("/api.php/getappapi.index/searchList", body)
        result = {"list": data["search_list"], "page": pg}
        return result

    def playerContent(self, flag, id, vipFlags):
        ids = json.loads(self.d64(id))
        h = {"User-Agent": (ids['user_agent'] or "okhttp/3.14.9")}
        try:
            if re.search(r'url=', ids['parse_api_url']):
                data = self.fetch(ids['parse_api_url'], headers=h, timeout=10).json()
                url = data.get('url') or data['data'].get('url')
            else:
                body = f"parse_api={ids.get('parse') or ids['parse_api_url'].replace(ids['url'], '')}&url={quote(self.aes(ids['url'], True))}&token={ids.get('token')}"
                b = self.getdata("/api.php/getappapi.index/vodParse", body)['json']
                url = json.loads(b)['url']
            if 'error' in url: raise ValueError(f"解析失败: {url}")
            p = 0
        except Exception as e:
            print('错误信息:', e)
            url, p = ids['url'], 1

        if re.search(r'\.jpg|\.png|\.jpeg', url):
            url = self.Mproxy(url)
        result = {}
        result["parse"] = p
        result["url"] = url
        result["header"] = h
        return result

    def localProxy(self, param):
        return self.Mlocal(param)

    def gethost(self):
        headers = {
            'User-Agent': 'okhttp/3.14.9'
        }
        host = 'https://mfxs123.sbs'
        return host.strip()

    def aes(self, text, b=None):
        key = b"1234567887654321"
        cipher = AES.new(key, AES.MODE_CBC, key)
        if b:
            ct_bytes = cipher.encrypt(pad(text.encode("utf-8"), AES.block_size))
            ct = b64encode(ct_bytes).decode("utf-8")
            return ct
        else:
            pt = unpad(cipher.decrypt(b64decode(text)), AES.block_size)
            return pt.decode("utf-8")

    def header(self):
        t = str(int(time.time()))
        # signed request headers: unix time, md5(time) as device id, AES(time) as the verify sign
        header = {"Referer": self.host,
                  "User-Agent": "okhttp/3.14.9", "app-version-code": "140", "app-ui-mode": "light",
                  "app-api-verify-time": t, "app-user-device-id": self.md5(t),
                  "app-api-verify-sign": self.aes(t, True),
                  "Content-Type": "application/x-www-form-urlencoded; charset=UTF-8"}
        return header

    def getdata(self, path, data=None):
        vdata = self.post(f"{self.host}{path}", headers=self.header(), data=data, timeout=10).json()['data']
        data1 = self.aes(vdata)
        return json.loads(data1)

    def Mproxy(self, url):
        return f"{self.getProxyUrl()}&url={self.e64(url)}&type=m3u8"

    def Mlocal(self, param, header=None):
        url = self.d64(param["url"])
        ydata = self.fetch(url, headers=header, allow_redirects=False)
        data = ydata.content.decode('utf-8')
        if ydata.headers.get('Location'):
            url = ydata.headers['Location']
            data = self.fetch(url, headers=header).content.decode('utf-8')
        parsed_url = urlparse(url)
        durl = parsed_url.scheme + "://" + parsed_url.netloc
        lines = data.strip().split('\n')
        for index, string in enumerate(lines):
            if '#EXT' not in string and 'http' not in string:
                last_slash_index = string.rfind('/')
                lpath = string[:last_slash_index + 1]
                lines[index] = durl + ('' if lpath.startswith('/') else '/') + lpath
        data = '\n'.join(lines)
        return [200, "application/vnd.apple.mpegurl", data]

    def e64(self, text):
        try:
            text_bytes = text.encode('utf-8')
            encoded_bytes = b64encode(text_bytes)
            return encoded_bytes.decode('utf-8')
        except Exception as e:
            print(f"Base64编码错误: {str(e)}")
            return ""

    def d64(self, encoded_text):
        try:
            encoded_bytes = encoded_text.encode('utf-8')
            decoded_bytes = b64decode(encoded_bytes)
            return decoded_bytes.decode('utf-8')
        except Exception as e:
            print(f"Base64解码错误: {str(e)}")
            return ""

    def md5(self, text):
        h = MD5.new()
        h.update(text.encode('utf-8'))
        return h.hexdigest()
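For reference, the signed app-api-verify-* headers built by header()/aes() in this file can be reproduced outside the class; the 分享瓜萌 file below uses the same scheme with its own key. A minimal sketch under the same assumptions (16-byte key doubling as the CBC IV, key value taken from aes() above; hashlib.md5 gives the same digest as Crypto.Hash.MD5):

# Rebuild the app-api-verify-* headers the same way header()/aes() above do.
import time
from hashlib import md5
from base64 import b64encode
from Crypto.Cipher import AES
from Crypto.Util.Padding import pad

key = b"1234567887654321"                      # same key as aes(); it is also used as the IV
t = str(int(time.time()))
ct = AES.new(key, AES.MODE_CBC, key).encrypt(pad(t.encode("utf-8"), AES.block_size))
headers = {
    "app-api-verify-time": t,
    "app-user-device-id": md5(t.encode("utf-8")).hexdigest(),
    "app-api-verify-sign": b64encode(ct).decode("utf-8"),
}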
PY/分享瓜萌APP.py (new file, 209 lines)
@@ -0,0 +1,209 @@
import re
import sys
from Crypto.Hash import MD5

sys.path.append("..")
from Crypto.Cipher import AES
from Crypto.Util.Padding import pad, unpad
from urllib.parse import quote, urlparse
from base64 import b64encode, b64decode
import json
import time
from base.spider import Spider


class Spider(Spider):

    def init(self, extend=""):
        self.host = self.gethost()
        pass

    def isVideoFormat(self, url):
        pass

    def manualVideoCheck(self):
        pass

    def action(self, action):
        pass

    def destroy(self):
        pass

    def homeContent(self, filter):
        data = self.getdata("/api.php/getappapi.index/initV119")
        dy = {"class": "类型", "area": "地区", "lang": "语言", "year": "年份", "letter": "字母", "by": "排序",
              "sort": "排序"}
        filters = {}
        classes = []
        json_data = data["type_list"]
        homedata = data["banner_list"][8:]
        for item in json_data:
            if item["type_name"] == "全部":
                continue
            has_non_empty_field = False
            jsontype_extend = json.loads(item["type_extend"])
            homedata.extend(item["recommend_list"])
            jsontype_extend["sort"] = "最新,最热,最赞"
            classes.append({"type_name": item["type_name"], "type_id": item["type_id"]})
            for key in dy:
                if key in jsontype_extend and jsontype_extend[key].strip() != "":
                    has_non_empty_field = True
                    break
            if has_non_empty_field:
                filters[str(item["type_id"])] = []
                for dkey in jsontype_extend:
                    if dkey in dy and jsontype_extend[dkey].strip() != "":
                        values = jsontype_extend[dkey].split(",")
                        value_array = [{"n": value.strip(), "v": value.strip()} for value in values if
                                       value.strip() != ""]
                        filters[str(item["type_id"])].append({"key": dkey, "name": dy[dkey], "value": value_array})
        result = {}
        result["class"] = classes
        result["filters"] = filters
        result["list"] = homedata[1:]
        return result

    def homeVideoContent(self):
        pass

    def categoryContent(self, tid, pg, filter, extend):
        body = {"area": extend.get('area', '全部'), "year": extend.get('year', '全部'), "type_id": tid, "page": pg,
                "sort": extend.get('sort', '最新'), "lang": extend.get('lang', '全部'),
                "class": extend.get('class', '全部')}
        result = {}
        data = self.getdata("/api.php/getappapi.index/typeFilterVodList", body)
        result["list"] = data["recommend_list"]
        result["page"] = pg
        result["pagecount"] = 9999
        result["limit"] = 90
        result["total"] = 999999
        return result

    def detailContent(self, ids):
        body = f"vod_id={ids[0]}"
        data = self.getdata("/api.php/getappapi.index/vodDetail", body)
        vod = data["vod"]
        play = []
        names = []
        for itt in data["vod_play_list"]:
            a = []
            names.append(itt["player_info"]["show"])
            for it in itt['urls']:
                it['user_agent'] = itt["player_info"].get("user_agent")
                it["parse"] = itt["player_info"].get("parse")
                a.append(f"{it['name']}${self.e64(json.dumps(it))}")
            play.append("#".join(a))
        vod["vod_play_from"] = "$$$".join(names)
        vod["vod_play_url"] = "$$$".join(play)
        result = {"list": [vod]}
        return result

    def searchContent(self, key, quick, pg="1"):
        body = f"keywords={key}&type_id=0&page={pg}"
        data = self.getdata("/api.php/getappapi.index/searchList", body)
        result = {"list": data["search_list"], "page": pg}
        return result

    def playerContent(self, flag, id, vipFlags):
        ids = json.loads(self.d64(id))
        h = {"User-Agent": (ids['user_agent'] or "okhttp/3.14.9")}
        try:
            if re.search(r'url=', ids['parse_api_url']):
                data = self.fetch(ids['parse_api_url'], headers=h, timeout=10).json()
                url = data.get('url') or data['data'].get('url')
            else:
                body = f"parse_api={ids.get('parse') or ids['parse_api_url'].replace(ids['url'], '')}&url={quote(self.aes(ids['url'], True))}&token={ids.get('token')}"
                b = self.getdata("/api.php/getappapi.index/vodParse", body)['json']
                url = json.loads(b)['url']
            if 'error' in url: raise ValueError(f"解析失败: {url}")
            p = 0
        except Exception as e:
            print('错误信息:', e)
            url, p = ids['url'], 1

        if re.search(r'\.jpg|\.png|\.jpeg', url):
            url = self.Mproxy(url)
        result = {}
        result["parse"] = p
        result["url"] = url
        result["header"] = h
        return result

    def localProxy(self, param):
        return self.Mlocal(param)

    def gethost(self):
        headers = {
            'User-Agent': 'okhttp/3.14.9'
        }
        host = 'https://www.guahd.com'
        return host.strip()

    def aes(self, text, b=None):
        key = b"f2A7D4B9E8C16531"
        cipher = AES.new(key, AES.MODE_CBC, key)
        if b:
            ct_bytes = cipher.encrypt(pad(text.encode("utf-8"), AES.block_size))
            ct = b64encode(ct_bytes).decode("utf-8")
            return ct
        else:
            pt = unpad(cipher.decrypt(b64decode(text)), AES.block_size)
            return pt.decode("utf-8")

    def header(self):
        t = str(int(time.time()))
        header = {"Referer": self.host,
                  "User-Agent": "okhttp/3.14.9", "app-version-code": "140", "app-ui-mode": "light",
                  "app-api-verify-time": t, "app-user-device-id": self.md5(t),
                  "app-api-verify-sign": self.aes(t, True),
                  "Content-Type": "application/x-www-form-urlencoded; charset=UTF-8"}
        return header

    def getdata(self, path, data=None):
        vdata = self.post(f"{self.host}{path}", headers=self.header(), data=data, timeout=10).json()['data']
        data1 = self.aes(vdata)
        return json.loads(data1)

    def Mproxy(self, url):
        return f"{self.getProxyUrl()}&url={self.e64(url)}&type=m3u8"

    def Mlocal(self, param, header=None):
        url = self.d64(param["url"])
        ydata = self.fetch(url, headers=header, allow_redirects=False)
        data = ydata.content.decode('utf-8')
        if ydata.headers.get('Location'):
            url = ydata.headers['Location']
            data = self.fetch(url, headers=header).content.decode('utf-8')
        parsed_url = urlparse(url)
        durl = parsed_url.scheme + "://" + parsed_url.netloc
        lines = data.strip().split('\n')
        for index, string in enumerate(lines):
            if '#EXT' not in string and 'http' not in string:
                last_slash_index = string.rfind('/')
                lpath = string[:last_slash_index + 1]
                lines[index] = durl + ('' if lpath.startswith('/') else '/') + lpath
        data = '\n'.join(lines)
        return [200, "application/vnd.apple.mpegurl", data]

    def e64(self, text):
        try:
            text_bytes = text.encode('utf-8')
            encoded_bytes = b64encode(text_bytes)
            return encoded_bytes.decode('utf-8')
        except Exception as e:
            print(f"Base64编码错误: {str(e)}")
            return ""

    def d64(self, encoded_text):
        try:
            encoded_bytes = encoded_text.encode('utf-8')
            decoded_bytes = b64decode(encoded_bytes)
            return decoded_bytes.decode('utf-8')
        except Exception as e:
            print(f"Base64解码错误: {str(e)}")
            return ""

    def md5(self, text):
        h = MD5.new()
        h.update(text.encode('utf-8'))
        return h.hexdigest()