rogerxavier committed • 09779cf
1 Parent(s): c3f19de
Delete utils.py
utils.py
DELETED
@@ -1,83 +0,0 @@
-# -*- coding: utf-8 -*-
-import requests
-
-from bs4 import BeautifulSoup
-
-session = requests.Session()
-# Purchase request test
-urlBase = 'https://jingling.bifangpu.com'
-purchaseBase = urlBase + "/api/resource/purchase"
-
-# Session expiry info: jinglingpan:sessid=bc5a890e-112d-4e3e-bf45-ad93a8c9ba1f; path=/; expires=Sun, 03 Nov 2024 01:22:08 GMT, jinglingpan:sessid.sig=590Y0XnEHcnqvRDMisZcjQxIqt8; path=/; expires=Sun, 03 Nov 2024 01:22:08 GMT
-cookie = {"jinglingpan:sessid": "bc5a890e-112d-4e3e-bf45-ad93a8c9ba1f", "jinglingpan:sessid.sig": "590Y0XnEHcnqvRDMisZcjQxIqt8"}
-
-session.cookies = requests.utils.cookiejar_from_dict(cookie)
-
-headers = {
-    "authority": "jingling.bifangpu.com",
-    "method": "GET",
-    "path": "/resource/detail/b01940f0f34e6ca91f68b258362d50f1",
-    "scheme": "https",
-    "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.7",
-    "Accept-Encoding": "gzip, deflate, br, zstd",
-    "Accept-Language": "zh-CN,zh;q=0.9",
-    "Cache-Control": "max-age=0",
-    "Dnt": "1",
-    "If-None-Match": "\"5b56-ChT6C0U9s3iYKFMBV41XLcqXxRc\"",
-    "Priority": "u=0, i",
-    "Sec-Ch-Ua": "\"Not/A)Brand\";v=\"8\", \"Chromium\";v=\"126\", \"Google Chrome\";v=\"126\"",
-    "Sec-Ch-Ua-Mobile": "?0",
-    "Sec-Ch-Ua-Platform": "\"Windows\"",
-    "Sec-Fetch-Dest": "document",
-    "Sec-Fetch-Mode": "navigate",
-    "Sec-Fetch-Site": "same-origin",
-    "Sec-Fetch-User": "?1",
-    "Upgrade-Insecure-Requests": "1",
-    "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/126.0.0.0 Safari/537.36"
-}
-
-# A response of {"code": 10602} means the resource was already purchased; 1006 means the monthly limit was exceeded and the account must be switched
-def try_purchase(fid: str, session) -> dict:
-    payload = {
-        "fid": fid
-    }
-    resp = session.post(purchaseBase, data=payload)
-    return resp.json()
-
-
-# If the code is 1006, the monthly limit was exceeded and the account must be switched
-# Fetch the pan (cloud drive) info: if 10602 (already purchased), refresh the page to read it; otherwise purchase first and then refresh. Either way, call try_purchase once to decide, then reload the page
-# -> list / None
-def get_pan_info(fid: str, session):
-    # Returns a list containing the extraction code and the pan link
-    result = []
-    detailContent = get_detail(fid=fid, session=session)
-
-    soup = BeautifulSoup(detailContent, 'html.parser')
-
-    copy_contents = soup.find_all('span', class_='copy-content')
-    if len(copy_contents) > 0:
-        for info in copy_contents:
-            # print(info.text)  # iterating yields the extraction code and the pan link
-            result.append(info.text)
-        return result
-    return None
-
-
-
-# Reload the detail page for the given fid and return the HTML
-def get_detail(fid: str, session) -> str:
-    detailUrl = urlBase + '/resource/detail/' + fid
-    resp = session.get(detailUrl)
-    return resp.text
-
-
-
-if __name__ == '__main__':
-    fid = "b01940f0f34e6ca91f68b258362d50f1"
-    session.headers = headers
-    jsonResp = try_purchase(fid=fid, session=session)
-    print(jsonResp)
-
-    panResult = get_pan_info(fid=fid, session=session)  # always try the purchase first, then reopen the detail page and read the result
-    print(panResult)