Spaces:
Running
Running
:gem: [Feature] New api for chat_requirements, and modularize logs
Browse files- tests/openai.py +30 -6
tests/openai.py
CHANGED
@@ -18,12 +18,13 @@ class OpenaiAPI:
|
|
18 |
self.api_me = f"{self.api_base}/me"
|
19 |
self.api_models = f"{self.api_base}/models"
|
20 |
self.api_chat_requirements = f"{self.api_base}/sentinel/chat-requirements"
|
|
|
21 |
self.requests_headers = {
|
22 |
"Accept": "*/*",
|
23 |
"Accept-Encoding": "gzip, deflate, br, zstd",
|
24 |
"Accept-Language": "en-US,en;q=0.9",
|
25 |
"Cache-Control": "no-cache",
|
26 |
-
"Oai-Device-Id":
|
27 |
"Oai-Language": "en-US",
|
28 |
"Pragma": "no-cache",
|
29 |
"Referer": "https://chat.openai.com/",
|
@@ -42,12 +43,24 @@ class OpenaiAPI:
|
|
42 |
"https": http_proxy,
|
43 |
}
|
44 |
|
45 |
-
def
|
46 |
if ENVER["http_proxy"]:
|
47 |
logger.note(f"> Using Proxy: {ENVER['http_proxy']}")
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
48 |
|
49 |
-
logger.
|
50 |
|
|
|
|
|
51 |
res = requests.get(
|
52 |
self.api_models,
|
53 |
headers=self.requests_headers,
|
@@ -56,12 +69,23 @@ class OpenaiAPI:
|
|
56 |
impersonate="chrome120",
|
57 |
)
|
58 |
|
59 |
-
|
60 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
61 |
|
62 |
|
63 |
if __name__ == "__main__":
|
64 |
api = OpenaiAPI()
|
65 |
-
api.
|
66 |
|
67 |
# python -m tests.openai
|
|
|
18 |
self.api_me = f"{self.api_base}/me"
|
19 |
self.api_models = f"{self.api_base}/models"
|
20 |
self.api_chat_requirements = f"{self.api_base}/sentinel/chat-requirements"
|
21 |
+
self.uuid = str(uuid.uuid4())
|
22 |
self.requests_headers = {
|
23 |
"Accept": "*/*",
|
24 |
"Accept-Encoding": "gzip, deflate, br, zstd",
|
25 |
"Accept-Language": "en-US,en;q=0.9",
|
26 |
"Cache-Control": "no-cache",
|
27 |
+
"Oai-Device-Id": self.uuid,
|
28 |
"Oai-Language": "en-US",
|
29 |
"Pragma": "no-cache",
|
30 |
"Referer": "https://chat.openai.com/",
|
|
|
43 |
"https": http_proxy,
|
44 |
}
|
45 |
|
46 |
+
def log_request(self, url, method="GET"):
|
47 |
if ENVER["http_proxy"]:
|
48 |
logger.note(f"> Using Proxy: {ENVER['http_proxy']}")
|
49 |
+
logger.note(f"> {method}: {url}", end=" ")
|
50 |
+
|
51 |
+
def log_response(self, res: requests.Response):
|
52 |
+
status_code = res.status_code
|
53 |
+
status_code_str = f"[{status_code}]"
|
54 |
+
if status_code == 200:
|
55 |
+
logger.success(status_code_str)
|
56 |
+
else:
|
57 |
+
logger.warn(f"uuid: {self.uuid}")
|
58 |
+
logger.warn(status_code_str)
|
59 |
|
60 |
+
logger.mesg(res.json())
|
61 |
|
62 |
+
def get_models(self):
|
63 |
+
self.log_request(self.api_models)
|
64 |
res = requests.get(
|
65 |
self.api_models,
|
66 |
headers=self.requests_headers,
|
|
|
69 |
impersonate="chrome120",
|
70 |
)
|
71 |
|
72 |
+
self.log_response(res)
|
73 |
+
|
74 |
+
def auth(self):
|
75 |
+
self.log_request(self.api_models, method="POST")
|
76 |
+
res = requests.post(
|
77 |
+
self.api_chat_requirements,
|
78 |
+
headers=self.requests_headers,
|
79 |
+
proxies=self.requests_proxies,
|
80 |
+
timeout=10,
|
81 |
+
impersonate="chrome120",
|
82 |
+
)
|
83 |
+
|
84 |
+
self.log_response(res)
|
85 |
|
86 |
|
87 |
if __name__ == "__main__":
|
88 |
api = OpenaiAPI()
|
89 |
+
api.auth()
|
90 |
|
91 |
# python -m tests.openai
|