Spaces:
Sleeping
Sleeping
JarvisChan630
committed on
Commit
•
49bd427
1
Parent(s):
75d4aef
fix bugs
Browse files
- agents/meta_agent.py +1 -1
- chat.py +2 -2
- legacy/chat copy.py +1 -1
- tools/legacy/offline_graph_rag_tool copy.py +1 -1
- tools/legacy/rag_tool.py +1 -1
- tools/offline_graph_rag_tool.py +1 -1
- tools/offline_graph_rag_tool_with_async.py +1 -1
agents/meta_agent.py
CHANGED
@@ -405,7 +405,7 @@ if __name__ == "__main__":
|
|
405 |
}
|
406 |
|
407 |
agent_kwargs = {
|
408 |
-
"model": "gpt-
|
409 |
"server": "openai",
|
410 |
"temperature": 0.1
|
411 |
}
|
|
|
405 |
}
|
406 |
|
407 |
agent_kwargs = {
|
408 |
+
"model": "gpt-4o-mini",
|
409 |
"server": "openai",
|
410 |
"temperature": 0.1
|
411 |
}
|
chat.py
CHANGED
@@ -40,12 +40,12 @@ def get_agent_kwargs(server: str = "claude", location: str = None, hybrid: bool
|
|
40 |
|
41 |
if server == "openai":
|
42 |
agent_kwargs = {
|
43 |
-
"model": "gpt-
|
44 |
"server": "openai",
|
45 |
"temperature": 0,
|
46 |
}
|
47 |
agent_kwargs_meta_expert = agent_kwargs.copy()
|
48 |
-
agent_kwargs_meta_expert["model"] = "
|
49 |
|
50 |
# Mistral
|
51 |
elif server == "mistral":
|
|
|
40 |
|
41 |
if server == "openai":
|
42 |
agent_kwargs = {
|
43 |
+
"model": "gpt-4o-mini",
|
44 |
"server": "openai",
|
45 |
"temperature": 0,
|
46 |
}
|
47 |
agent_kwargs_meta_expert = agent_kwargs.copy()
|
48 |
+
agent_kwargs_meta_expert["model"] = "gpt-4o-mini"
|
49 |
|
50 |
# Mistral
|
51 |
elif server == "mistral":
|
legacy/chat copy.py
CHANGED
@@ -36,7 +36,7 @@ def get_agent_kwargs(server: str = "claude", location: str = None, hybrid: bool
|
|
36 |
|
37 |
if server == "openai":
|
38 |
agent_kwargs = {
|
39 |
-
"model": "gpt-
|
40 |
"server": "openai",
|
41 |
"temperature": 0,
|
42 |
}
|
|
|
36 |
|
37 |
if server == "openai":
|
38 |
agent_kwargs = {
|
39 |
+
"model": "gpt-4o-mini",
|
40 |
"server": "openai",
|
41 |
"temperature": 0,
|
42 |
}
|
tools/legacy/offline_graph_rag_tool copy.py
CHANGED
@@ -331,7 +331,7 @@ def create_graph_index(
|
|
331 |
if os.environ.get('LLM_SERVER') == "openai":
|
332 |
# require hundreds calls to api
|
333 |
# we create index for every small chunk
|
334 |
-
llm = ChatOpenAI(temperature=0, model_name="gpt-
|
335 |
|
336 |
else:
|
337 |
llm = ChatAnthropic(temperature=0, model_name="claude-3-haiku-20240307")
|
|
|
331 |
if os.environ.get('LLM_SERVER') == "openai":
|
332 |
# require hundreds calls to api
|
333 |
# we create index for every small chunk
|
334 |
+
llm = ChatOpenAI(temperature=0, model_name="gpt-4o-mini-2024-07-18")
|
335 |
|
336 |
else:
|
337 |
llm = ChatAnthropic(temperature=0, model_name="claude-3-haiku-20240307")
|
tools/legacy/rag_tool.py
CHANGED
@@ -318,7 +318,7 @@ def create_graph_index(
|
|
318 |
graph: Neo4jGraph = None,
|
319 |
max_threads: int = 5
|
320 |
) -> Neo4jGraph:
|
321 |
-
llm = ChatOpenAI(temperature=0, model_name="gpt-
|
322 |
|
323 |
# llm = ChatAnthropic(temperature=0, model_name="claude-3-haiku-20240307")
|
324 |
|
|
|
318 |
graph: Neo4jGraph = None,
|
319 |
max_threads: int = 5
|
320 |
) -> Neo4jGraph:
|
321 |
+
llm = ChatOpenAI(temperature=0, model_name="gpt-4o-mini-2024-07-18")
|
322 |
|
323 |
# llm = ChatAnthropic(temperature=0, model_name="claude-3-haiku-20240307")
|
324 |
|
tools/offline_graph_rag_tool.py
CHANGED
@@ -324,7 +324,7 @@ def create_graph_index(
|
|
324 |
) -> Neo4jGraph:
|
325 |
|
326 |
if os.environ.get('LLM_SERVER') == "openai":
|
327 |
-
llm = ChatOpenAI(temperature=0, model_name="gpt-
|
328 |
else:
|
329 |
llm = ChatAnthropic(temperature=0, model_name="claude-3-haiku-20240307")
|
330 |
|
|
|
324 |
) -> Neo4jGraph:
|
325 |
|
326 |
if os.environ.get('LLM_SERVER') == "openai":
|
327 |
+
llm = ChatOpenAI(temperature=0, model_name="gpt-4o-mini-2024-07-18")
|
328 |
else:
|
329 |
llm = ChatAnthropic(temperature=0, model_name="claude-3-haiku-20240307")
|
330 |
|
tools/offline_graph_rag_tool_with_async.py
CHANGED
@@ -321,7 +321,7 @@ def create_graph_index(
|
|
321 |
) -> Neo4jGraph:
|
322 |
|
323 |
if os.environ.get('LLM_SERVER') == "openai":
|
324 |
-
llm = ChatOpenAI(temperature=0, model_name="gpt-
|
325 |
else:
|
326 |
llm = ChatAnthropic(temperature=0, model_name="claude-3-haiku-20240307")
|
327 |
|
|
|
321 |
) -> Neo4jGraph:
|
322 |
|
323 |
if os.environ.get('LLM_SERVER') == "openai":
|
324 |
+
llm = ChatOpenAI(temperature=0, model_name="gpt-4o-mini-2024-07-18")
|
325 |
else:
|
326 |
llm = ChatAnthropic(temperature=0, model_name="claude-3-haiku-20240307")
|
327 |
|