:boom: [Fix] inputs_tokens + max_new_tokens must be <= 32768
networks/message_streamer.py
CHANGED
@@ -28,7 +28,7 @@ class MessageStreamer:
         "mistral-7b": 32768,
         "openchat-3.5": 8192,
     }
-    TOKEN_RESERVED =
+    TOKEN_RESERVED = 100

     def __init__(self, model: str):
         if model in self.MODEL_MAP.keys():
@@ -83,10 +83,10 @@ class MessageStreamer:
         temperature = max(temperature, 0.01)
         temperature = min(temperature, 1)

-        token_limit = (
+        token_limit = int(
             self.TOKEN_LIMIT_MAP[self.model]
             - self.TOKEN_RESERVED
-            - self.count_tokens(prompt)
+            - self.count_tokens(prompt) * 1.35
         )
         if token_limit <= 0:
             raise ValueError("Prompt exceeded token limit!")
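For reference, below is a minimal, self-contained sketch of the budget check this diff implements. The helper names (compute_max_new_tokens, count_tokens_rough) are hypothetical stand-ins introduced for illustration; the real class uses its own count_tokens(). Under that assumption, the 1.35 multiplier pads the locally counted prompt tokens so that an undercount relative to the tokenizer the inference endpoint actually applies cannot push inputs_tokens + max_new_tokens past the context window, and int() floors the padded budget back to a whole token count.

def count_tokens_rough(text: str) -> int:
    # Crude stand-in for the class's count_tokens(): a whitespace split
    # only approximates a real tokenizer count (assumption for this sketch).
    return len(text.split())

TOKEN_LIMIT_MAP = {"mistral-7b": 32768, "openchat-3.5": 8192}
TOKEN_RESERVED = 100  # head-room kept back for template/special tokens

def compute_max_new_tokens(model: str, prompt: str, requested: int) -> int:
    # Pad the local count by 1.35x before subtracting from the context
    # window, mirroring the patched token_limit computation above.
    token_limit = int(
        TOKEN_LIMIT_MAP[model]
        - TOKEN_RESERVED
        - count_tokens_rough(prompt) * 1.35
    )
    if token_limit <= 0:
        raise ValueError("Prompt exceeded token limit!")
    # Cap the caller's request so prompt tokens + max_new_tokens
    # stay within the model's context window.
    return min(requested, token_limit)

# e.g. a 1,000-word prompt against mistral-7b leaves
# int(32768 - 100 - 1000 * 1.35) = 31318 tokens of generation budget:
print(compute_max_new_tokens("mistral-7b", "word " * 1000, 4096))  # -> 4096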