Update app.py
app.py
CHANGED
@@ -2,13 +2,9 @@ import torch
 from transformers import AutoTokenizer, AutoModelForCausalLM
 from flask import Flask, request, jsonify, render_template_string
 import time
-from flask_sse import sse
-import redis
 
 # Flask application setup
 app = Flask(__name__)
-app.config["REDIS_URL"] = "redis://localhost:6379/0"
-app.register_blueprint(sse, url_prefix='/stream')
 
 # Device setup
 device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
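Taken together, the four deleted lines in this hunk drop the app's only Redis dependency: flask_sse fans events out through the Redis instance named in app.config["REDIS_URL"] and serves them to subscribers under the blueprint's /stream prefix. For reference, a minimal sketch of the wiring this commit removes, with every flask_sse call taken from the deleted lines; the /demo route is hypothetical and a reachable Redis server is assumed.

# Sketch of the flask_sse wiring removed by this commit. Every flask_sse call
# appears in the deleted lines; the /demo route is hypothetical, and a Redis
# server reachable at localhost:6379 is assumed.
from flask import Flask
from flask_sse import sse

app = Flask(__name__)
app.config["REDIS_URL"] = "redis://localhost:6379/0"   # flask_sse uses Redis pub/sub
app.register_blueprint(sse, url_prefix='/stream')      # browsers subscribe at GET /stream

@app.route('/demo')
def demo():
    # Push one SSE event to every client currently connected to /stream.
    sse.publish({"message": "hello"}, type='message')
    return "sent"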
@@ -96,14 +92,6 @@ HTML_TEMPLATE = """
|
|
96 |
alert('エラーが発生しました。コンソールを確認してください。');
|
97 |
});
|
98 |
}
|
99 |
-
|
100 |
-
// SSEの設定
|
101 |
-
const eventSource = new EventSource("/stream");
|
102 |
-
|
103 |
-
eventSource.onmessage = function(event) {
|
104 |
-
const message = event.data;
|
105 |
-
addMessageToChat('assistant', message);
|
106 |
-
};
|
107 |
</script>
|
108 |
</body>
|
109 |
</html>
|
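The block deleted above was the browser-side half of the removed SSE pipeline: an EventSource kept the /stream connection open and appended each incoming message to the chat, so the page now only shows what the JSON reply of the generate endpoint contains. For illustration, a rough Python stand-in for what the removed handler did, assuming the old /stream blueprint were still mounted (the base URL http://localhost:5000 is an assumption).

# Rough Python stand-in for the deleted EventSource handler, for illustration
# only; it assumes the old /stream blueprint were still mounted and that the
# dev server runs at http://localhost:5000.
import json
import requests

with requests.get("http://localhost:5000/stream", stream=True) as resp:
    for raw in resp.iter_lines(decode_unicode=True):
        # SSE frames arrive as text lines such as:  data: {"message": "word"}
        if raw and raw.startswith("data:"):
            payload = json.loads(raw[len("data:"):].strip())
            print(payload["message"])  # what addMessageToChat('assistant', ...) rendered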
@@ -172,12 +160,6 @@ def generate():
         return jsonify({"error": "role_instruction and new_conversation are required fields"}), 400
 
     formatted_output_all, response = generate_response(role_instruction, conversation_history, new_conversation)
-
-    # Stream the response to the client in real time via SSE
-    for word in response.split():
-        sse.publish({"message": word}, type='message')
-        time.sleep(0.5)  # simulate the send interval
-
     return jsonify({"response": response, "conversation_history": conversation_history})
 
 if __name__ == '__main__':
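After this hunk, generate() no longer publishes anything over SSE; the whole reply reaches the client only in the final jsonify payload. If word-by-word delivery were wanted again without reintroducing flask_sse and Redis, one option is a plain Flask generator response. A minimal sketch under assumptions: the /generate_stream route name and the JSON field handling are illustrative, the code lives in app.py where app, jsonify and generate_response() already exist, and generate_response keeps the signature used above.

# Hedged sketch, not part of this commit: word-by-word streaming via a plain
# Flask generator response instead of flask_sse/Redis. The route name is
# hypothetical; app, jsonify and generate_response() come from app.py.
import time
from flask import Response, request, stream_with_context

@app.route("/generate_stream", methods=["POST"])
def generate_stream():
    data = request.get_json()
    role_instruction = data.get("role_instruction")
    conversation_history = data.get("conversation_history", [])
    new_conversation = data.get("new_conversation")
    if not role_instruction or not new_conversation:
        return jsonify({"error": "role_instruction and new_conversation are required fields"}), 400

    _, response = generate_response(role_instruction, conversation_history, new_conversation)

    def words():
        # Chunks are flushed to the client as they are yielded, mirroring the
        # removed sse.publish() loop without a Redis broker.
        for word in response.split():
            yield word + " "
            time.sleep(0.05)

    return Response(stream_with_context(words()), mimetype="text/plain")

A client would then read the body incrementally (for example with requests' iter_content) instead of waiting for a single JSON document.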