tdeshane committed on
Commit 638b146
1 Parent(s): 38614a2
Files changed (1)
  1. app.py +107 -1
app.py CHANGED
@@ -14,9 +14,98 @@ from collections import defaultdict
 import os
 
 
+from langchain.agents import AgentExecutor, AgentType, initialize_agent
+from langchain.agents.structured_chat.prompt import SUFFIX
+from langchain.chat_models import ChatOpenAI
+from langchain.memory import ConversationBufferMemory
+
+from chainlit.action import Action
+from chainlit.input_widget import Select, Switch, Slider
+
+from langchain.tools import StructuredTool, Tool
+
+
+# This is our tool, which is what allows our agent to generate images in the first place!
+# The `description` field is of utmost importance, as it is what the LLM "brain" uses to
+# determine which tool to use for a given input.
+
+
 got_csv = False
 
 
+@cl.on_chat_start
+async def start():
+    """
+    This is called when the Chainlit chat is started!
+    We can add some settings to our application to allow users to select the appropriate model, and more!
+    """
+    settings = await cl.ChatSettings(
+        [
+            Select(
+                id="Model",
+                label="OpenAI - Model",
+                values=["gpt-3.5-turbo", "gpt-4-1106-preview"],
+                initial_index=1,
+            ),
+            Switch(id="Streaming", label="OpenAI - Stream Tokens", initial=True),
+            Slider(
+                id="Temperature",
+                label="OpenAI - Temperature",
+                initial=0,
+                min=0,
+                max=2,
+                step=0.1,
+            ),
+        ]
+    ).send()
+    await setup_agent(settings)
+
+
+@cl.on_settings_update
+async def setup_agent(settings):
+    print("Setup agent with following settings: ", settings)
+
+    # We set up our agent with the user-selected (or default) settings here.
+    llm = ChatOpenAI(
+        temperature=settings["Temperature"],
+        streaming=settings["Streaming"],
+        model=settings["Model"],
+    )
+
+    # We get our memory here, which is used to track the conversation history.
+    memory = get_memory()
+
+    # This suffix is used to provide the chat history to the prompt.
+    _SUFFIX = "Chat history:\n{chat_history}\n\n" + SUFFIX
+
+    # We initialize our agent here, which is simply being used to decide between
+    # responding with text or an image.
+    agent = initialize_agent(
+        llm=llm,  # our LLM (default is GPT-4 Turbo)
+        tools=[generate_most_valuable_feature],
+        agent=AgentType.STRUCTURED_CHAT_ZERO_SHOT_REACT_DESCRIPTION,  # the agent type we're using today
+        memory=memory,  # our memory!
+        agent_kwargs={
+            "suffix": _SUFFIX,  # adding our chat history suffix
+            "input_variables": ["input", "agent_scratchpad", "chat_history"],
+        },
+    )
+    cl.user_session.set("agent", agent)  # storing our agent in the user session
+
+
+@cl.cache
+def get_memory():
+    """
+    This is used to track the conversation history and allow our agent to
+    remember what was said before.
+    """
+    return ConversationBufferMemory(memory_key="chat_history")
+
+
 def find_most_valuable_feature(csv_file):
     print("find_most_valuable_feature")
     print(csv_file)
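Note: `on_settings_update` receives a plain dict keyed by the widget ids declared above (the commit's own `settings["Temperature"]` lookups rely on this). A minimal sketch, not part of the commit; the example values simply mirror the declared defaults:

from langchain.chat_models import ChatOpenAI

# Hedged sketch: example_settings mirrors the defaults declared in cl.ChatSettings above.
example_settings = {"Model": "gpt-4-1106-preview", "Streaming": True, "Temperature": 0}

# The same lookups setup_agent performs (constructing ChatOpenAI requires OPENAI_API_KEY).
llm = ChatOpenAI(
    model=example_settings["Model"],
    streaming=example_settings["Streaming"],
    temperature=example_settings["Temperature"],
)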
@@ -75,6 +164,12 @@ def find_most_valuable_feature(csv_file):
     return image_buffer
 
 
+generate_most_valuable_feature = Tool.from_function(
+    func=find_most_valuable_feature,
+    name="Find most valuable feature",
+    description="Useful for finding the most valuable feature from a CSV file",
+    return_direct=True,
+)
 
 
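The `Tool.from_function` pattern above generalizes to any plain callable. A hedged sketch with a hypothetical `summarize_csv` helper (not part of this repo):

from langchain.tools import Tool

def summarize_csv(path: str) -> str:
    # Hypothetical helper, for illustration only.
    return f"Loaded {path}; a real implementation would summarize the file here."

summarize_csv_tool = Tool.from_function(
    func=summarize_csv,
    name="Summarize CSV",
    # The description is what the agent's LLM reads when deciding which tool to call.
    description="Useful for producing a short text summary of an uploaded CSV file",
    # return_direct=True (as used above) would hand the tool's output straight back to the
    # user; leaving it unset lets the agent keep reasoning over the result.
)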
 
@@ -109,7 +204,7 @@ def process_and_analyze_data(csv_file):
 
 @cl.on_message
 async def handle_message(message: cl.Message):
-    global got_csv
+    global got_csv, agent
     # Retrieve the CSV file from the message
     csv_file = next(
         (
 
@@ -139,6 +234,12 @@ async def handle_message(message: cl.Message):
 
     generated_image = cl.user_session.get(name)
 
+    agent = cl.user_session.get("agent")
+
+    res = await cl.make_async(agent.run)(
+        input=message.content, callbacks=[cl.LangchainCallbackHandler()]
+    )
+
     elements = []
     actions = []
 
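`agent.run` is a blocking, synchronous call, so the handler wraps it with `cl.make_async` to keep Chainlit's event loop responsive. A standalone sketch of the same pattern with a stand-in for the agent call (assumed names, illustration only):

import time

import chainlit as cl

def slow_sync_call(text: str) -> str:
    # Stand-in for agent.run: any blocking function is handled the same way.
    time.sleep(1)
    return f"processed: {text}"

@cl.on_message
async def demo_handler(message: cl.Message):
    # cl.make_async runs the blocking call in a worker thread and awaits its result.
    result = await cl.make_async(slow_sync_call)(message.content)
    await cl.Message(content=result).send()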
 
 
@@ -159,6 +260,11 @@ async def handle_message(message: cl.Message):
     else:
         if not got_csv:
             await cl.Message(content="Please upload a CSV file.").send()
+        else:
+            res = await cl.make_async(agent.run)(
+                input=message.content, callbacks=[cl.LangchainCallbackHandler()]
+            )
+            await cl.Message(content=res).send()
 
 
 # Run the ChainLit app
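With these additions the app is still launched the standard Chainlit way, e.g. `chainlit run app.py`, with OPENAI_API_KEY set in the environment so ChatOpenAI can authenticate.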