Spaces:
Sleeping
Sleeping
ogegadavis254
committed on
Commit
•
1b110d7
1
Parent(s):
bf8b0f5
Update app.py
Browse files
app.py
CHANGED
@@ -55,7 +55,6 @@ def interact_with_huggingface_model(messages, model):
|
|
55 |
# Function to interact with the Together API model
|
56 |
def interact_with_together_api(messages):
|
57 |
all_messages = []
|
58 |
-
entire_assistant_response = "" # Initialize the variable
|
59 |
|
60 |
if not messages: # If history is empty
|
61 |
all_messages.append({"role": "user", "content": ""})
|
@@ -76,7 +75,6 @@ def interact_with_together_api(messages):
|
|
76 |
"repetition_penalty": 1,
|
77 |
"n": 1,
|
78 |
"messages": all_messages,
|
79 |
-
"stream_tokens": True,
|
80 |
}
|
81 |
|
82 |
TOGETHER_API_KEY = os.getenv('TOGETHER_API_KEY')
|
@@ -86,34 +84,14 @@ def interact_with_together_api(messages):
|
|
86 |
"Authorization": f"Bearer {TOGETHER_API_KEY}",
|
87 |
}
|
88 |
|
89 |
-
response = requests.post(url, json=payload, headers=headers
|
90 |
response.raise_for_status() # Ensure HTTP request was successful
|
91 |
|
92 |
-
|
93 |
-
|
94 |
-
|
95 |
-
|
96 |
-
|
97 |
-
if decoded_line == "data: [DONE]":
|
98 |
-
if entire_assistant_response:
|
99 |
-
yield entire_assistant_response # Yield the entire response at the end
|
100 |
-
break
|
101 |
-
|
102 |
-
try:
|
103 |
-
# Decode and strip any SSE format specific prefix ("data: ")
|
104 |
-
if decoded_line.startswith("data: "):
|
105 |
-
decoded_line = decoded_line.replace("data: ", "")
|
106 |
-
chunk_data = json.loads(decoded_line)
|
107 |
-
content = chunk_data['choices'][0]['delta']['content']
|
108 |
-
entire_assistant_response += content # Aggregate content
|
109 |
-
yield content # Yield each chunk separately
|
110 |
-
|
111 |
-
except json.JSONDecodeError:
|
112 |
-
print(f"Invalid JSON received: {decoded_line}")
|
113 |
-
continue
|
114 |
-
except KeyError as e:
|
115 |
-
print(f"KeyError encountered: {e}")
|
116 |
-
continue
|
117 |
|
118 |
# Create sidebar with model selection dropdown and temperature slider
|
119 |
selected_model = st.sidebar.selectbox("Select Model", list(model_links.keys()))
|
@@ -147,15 +125,15 @@ if prompt := st.chat_input(f"Hi, I'm {selected_model}, ask me a question"):
|
|
147 |
|
148 |
# Interact with selected model
|
149 |
if selected_model == "Nous-Hermes-2-Yi-34B":
|
150 |
-
|
151 |
else:
|
152 |
-
|
153 |
|
154 |
# Display assistant response in chat message container
|
155 |
-
with st.
|
156 |
-
|
157 |
-
|
158 |
-
|
159 |
-
|
160 |
-
|
161 |
-
st.session_state.messages.append(("assistant",
|
|
|
55 |
# Function to interact with the Together API model
|
56 |
def interact_with_together_api(messages):
|
57 |
all_messages = []
|
|
|
58 |
|
59 |
if not messages: # If history is empty
|
60 |
all_messages.append({"role": "user", "content": ""})
|
|
|
75 |
"repetition_penalty": 1,
|
76 |
"n": 1,
|
77 |
"messages": all_messages,
|
|
|
78 |
}
|
79 |
|
80 |
TOGETHER_API_KEY = os.getenv('TOGETHER_API_KEY')
|
|
|
84 |
"Authorization": f"Bearer {TOGETHER_API_KEY}",
|
85 |
}
|
86 |
|
87 |
+
response = requests.post(url, json=payload, headers=headers)
|
88 |
response.raise_for_status() # Ensure HTTP request was successful
|
89 |
|
90 |
+
# Extract response from JSON
|
91 |
+
response_data = response.json()
|
92 |
+
assistant_response = response_data["choices"][0]["message"]
|
93 |
+
|
94 |
+
return assistant_response
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
95 |
|
96 |
# Create sidebar with model selection dropdown and temperature slider
|
97 |
selected_model = st.sidebar.selectbox("Select Model", list(model_links.keys()))
|
|
|
125 |
|
126 |
# Interact with selected model
|
127 |
if selected_model == "Nous-Hermes-2-Yi-34B":
|
128 |
+
assistant_response = interact_with_together_api(st.session_state.messages)
|
129 |
else:
|
130 |
+
assistant_response = interact_with_huggingface_model(st.session_state.messages, model_links[selected_model])
|
131 |
|
132 |
# Display assistant response in chat message container
|
133 |
+
with st.empty():
|
134 |
+
st.markdown("AI is typing...")
|
135 |
+
st.empty()
|
136 |
+
st.markdown(assistant_response)
|
137 |
+
|
138 |
+
# Add assistant response to chat history
|
139 |
+
st.session_state.messages.append(("assistant", assistant_response))
|