:boom: [Fix] Error 500 when temperature is null
networks/message_streamer.py
CHANGED
@@ -55,7 +55,7 @@ class MessageStreamer:
     def chat_response(
         self,
         prompt: str = None,
-        temperature: float = 0
+        temperature: float = 0,
         max_new_tokens: int = None,
         api_key: str = None,
     ):
@@ -74,6 +74,11 @@ class MessageStreamer:
         )
         self.request_headers["Authorization"] = f"Bearer {api_key}"

+        if temperature is None or temperature < 0:
+            temperature = 0.0
+        # temperature must be positive for HF LLM models
+        temperature = max(temperature, 0.01)
+
         token_limit = (
             self.TOKEN_LIMIT_MAP[self.model]
             - self.TOKEN_RESERVED
@@ -96,7 +101,7 @@ class MessageStreamer:
         self.request_body = {
             "inputs": prompt,
             "parameters": {
-                "temperature":
+                "temperature": temperature,
                 "max_new_tokens": max_new_tokens,
                 "return_full_text": False,
             },
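For context, the guard added above can be exercised in isolation. The sketch below is a minimal, hypothetical reproduction of the fix, not code from the repository: the helper name normalize_temperature is ours, and the 0.01 floor comes straight from the diff's comment that temperature must be positive for HF LLM models.

from typing import Optional


def normalize_temperature(temperature: Optional[float]) -> float:
    """Mirror of the fix above (hypothetical helper, not in the repo):
    null/negative values fall back to 0.0, then the result is raised
    to a 0.01 floor so the value sent to the endpoint is positive."""
    if temperature is None or temperature < 0:
        temperature = 0.0
    # temperature must be positive for HF LLM models
    return max(temperature, 0.01)


# The Error 500 case from the commit title: a null temperature now
# reaches the API as a small positive float instead of null.
assert normalize_temperature(None) == 0.01
assert normalize_temperature(-1.0) == 0.01
assert normalize_temperature(0.7) == 0.7

With this normalization, the "temperature" entry in request_body["parameters"] is always a positive float before the request is sent, which is what the Hugging Face text-generation endpoint accepts.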