Spaces: Running on Zero
dfda
- command/interpreter.py +9 -0
- command/langchain.py +1 -1
command/interpreter.py
CHANGED
@@ -0,0 +1,9 @@
+from interpreter import interpreter
+# Store and use the OpenAI API key via environment variables
+interpreter.auto_run = True
+#interpreter.llm.model = "huggingface/meta-llama/Meta-Llama-3-8B-Instruct"
+#interpreter.llm.api_key = os.getenv("hf_token")
+interpreter.llm.api_base = "https://api.groq.com/openai/v1"
+interpreter.llm.api_key = os.getenv("apikey")
+interpreter.llm.model = "Llama3-70b-8192"
+interpreter.chat()
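As committed, interpreter.py calls os.getenv() without importing os, so the script stops with a NameError on its first run. A minimal runnable sketch with that import added is shown below; the environment variable name "apikey" and every Groq setting are taken from the diff above, and nothing else is assumed.

# Sketch only: the committed settings plus the missing `import os`.
import os

from interpreter import interpreter

interpreter.auto_run = True                                  # execute generated code without asking for confirmation
interpreter.llm.api_base = "https://api.groq.com/openai/v1"  # Groq's OpenAI-compatible endpoint
interpreter.llm.api_key = os.getenv("apikey")                # Groq API key read from the environment
interpreter.llm.model = "Llama3-70b-8192"                    # Groq-hosted Llama 3 70B
interpreter.chat()                                           # start the interactive session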
command/langchain.py
CHANGED
@@ -12,7 +12,7 @@ from langchain_groq import ChatGroq
 
 
 # Get Groq API key
-groq_api_key = "
+groq_api_key = os.getenv("apikey")
 groq_chat = ChatGroq(groq_api_key=groq_api_key, model_name="llama3-70b-8192")
 
 system_prompt = "あなたは便利なアシスタントです。"
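The changed line replaces what appears to be a hardcoded key (truncated in the diff view) with an environment-variable lookup, matching interpreter.py. A minimal sketch of how it sits in a runnable script follows; only the lines shown in the hunk come from the commit, while the imports and the example invocation at the end are illustrative assumptions.

import os

from langchain_core.messages import HumanMessage, SystemMessage  # assumed imports, not shown in the hunk
from langchain_groq import ChatGroq

groq_api_key = os.getenv("apikey")  # the committed change: read the key from the environment
groq_chat = ChatGroq(groq_api_key=groq_api_key, model_name="llama3-70b-8192")

system_prompt = "あなたは便利なアシスタントです。"  # "You are a helpful assistant."

# Illustrative usage, not part of the diff.
reply = groq_chat.invoke([SystemMessage(content=system_prompt),
                          HumanMessage(content="Hello!")])
print(reply.content)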