Tuchuanhuhuhu committed
Commit 0d61cce · Parent: 8baca88

Update error handling (更新错误处理)

Files changed:
- modules/base_model.py +21 -17
- modules/models.py +4 -0
modules/base_model.py CHANGED

@@ -287,24 +287,28 @@ class BaseLLMModel:
 
         self.history.append(construct_user(inputs))
 
-        if stream:
-            logging.debug("使用流式传输")
-            iter = self.stream_next_chatbot(
-                inputs,
-                chatbot,
-                fake_input=old_inputs,
-                display_append=display_reference,
-            )
-            for chatbot, status_text in iter:
+        try:
+            if stream:
+                logging.debug("使用流式传输")
+                iter = self.stream_next_chatbot(
+                    inputs,
+                    chatbot,
+                    fake_input=old_inputs,
+                    display_append=display_reference,
+                )
+                for chatbot, status_text in iter:
+                    yield chatbot, status_text
+            else:
+                logging.debug("不使用流式传输")
+                chatbot, status_text = self.next_chatbot_at_once(
+                    inputs,
+                    chatbot,
+                    fake_input=old_inputs,
+                    display_append=display_reference,
+                )
                 yield chatbot, status_text
-        else:
-            logging.debug("不使用流式传输")
-            chatbot, status_text = self.next_chatbot_at_once(
-                inputs,
-                chatbot,
-                fake_input=old_inputs,
-                display_append=display_reference,
-            )
+        except Exception as e:
+            status_text = STANDARD_ERROR_MSG + str(e)
             yield chatbot, status_text
 
         if len(self.history) > 1 and self.history[-1]["content"] != inputs:
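The hunk above wraps both the streaming and non-streaming reply paths in a single try/except and, on failure, yields one final (chatbot, status_text) pair instead of letting the exception escape the generator. Below is a minimal, self-contained sketch of that pattern; fake_stream, this predict signature, and the STANDARD_ERROR_MSG value are illustrative stand-ins, not the project's actual code.

import logging

STANDARD_ERROR_MSG = "An error occurred: "  # placeholder value; the real constant is defined elsewhere in the repo

def fake_stream(inputs, chatbot):
    """Stand-in for stream_next_chatbot(): yields partial updates, then fails."""
    for partial in ("Hel", "Hello", "Hello!"):
        yield chatbot + [(inputs, partial)], "generating..."
    raise RuntimeError("connection dropped")

def predict(inputs, chatbot, stream=True):
    try:
        if stream:
            logging.debug("streaming")
            for chatbot, status_text in fake_stream(inputs, chatbot):
                yield chatbot, status_text
        else:
            chatbot = chatbot + [(inputs, "Hello!")]
            yield chatbot, "done"
    except Exception as e:
        # chatbot still holds the last state that was successfully produced,
        # so only the status line changes; the chat history is preserved.
        status_text = STANDARD_ERROR_MSG + str(e)
        yield chatbot, status_text

if __name__ == "__main__":
    for chatbot, status in predict("hi", []):
        print(status, chatbot)

Because the consumer is a UI loop iterating this generator, yielding the error as a status message keeps the last good chatbot state on screen, whereas an uncaught exception would abort the update mid-stream.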
modules/models.py CHANGED

@@ -186,6 +186,7 @@ class OpenAIClient(BaseLLMModel):
         )
 
     def _decode_chat_response(self, response):
+        error_msg = ""
         for chunk in response.iter_lines():
             if chunk:
                 chunk = chunk.decode()
@@ -194,6 +195,7 @@ class OpenAIClient(BaseLLMModel):
                     chunk = json.loads(chunk[6:])
                 except json.JSONDecodeError:
                     print(f"JSON解析错误,收到的内容: {chunk}")
+                    error_msg+=chunk
                     continue
                 if chunk_length > 6 and "delta" in chunk["choices"][0]:
                     if chunk["choices"][0]["finish_reason"] == "stop":
@@ -203,6 +205,8 @@ class OpenAIClient(BaseLLMModel):
                     except Exception as e:
                         # logging.error(f"Error: {e}")
                         continue
+        if error_msg:
+            raise Exception(error_msg)
 
 
 class ChatGLM_Client(BaseLLMModel):
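The models.py change complements the one above: instead of silently skipping chunks that fail to parse, _decode_chat_response now accumulates them in error_msg and raises once the stream ends, so a server response that is not SSE-framed JSON reaches the caller. Here is a hedged sketch of that accumulate-then-raise pattern; decode_stream and the fake_lines payloads are invented for illustration and are not real API output.

import json

def decode_stream(lines):
    """Stand-in for _decode_chat_response(): collect bad chunks, raise at the end."""
    error_msg = ""
    for chunk in lines:
        if not chunk:
            continue
        try:
            payload = json.loads(chunk[6:])  # strip the "data: " SSE prefix
        except json.JSONDecodeError:
            error_msg += chunk  # keep the unparseable chunk instead of dropping it
            continue
        yield payload["choices"][0]["delta"].get("content", "")
    if error_msg:
        # One exception after iteration finishes, carrying everything that failed to parse
        raise Exception(error_msg)

# Invented sample: one well-formed SSE chunk, then a bare JSON error body
fake_lines = [
    'data: {"choices": [{"delta": {"content": "Hi"}}]}',
    '{"error": {"message": "Rate limit exceeded"}}',
]

try:
    for piece in decode_stream(fake_lines):
        print(piece)
except Exception as e:
    print("stream failed:", e)

Raising only after iteration finishes means the exception surfaces while the caller is still inside the new try/except in base_model.py, which converts it into a STANDARD_ERROR_MSG status line rather than a crash.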