acecalisto3 committed
Commit
0ac1b6c
1 Parent(s): cb01134

Update app.py

Files changed (1)
  1. app.py +20 -40
app.py
@@ -25,15 +25,15 @@ now = datetime.now()
 date_time_str = now.strftime("%Y-%m-%d %H:%M:%S")
 
 client = InferenceClient(
-    "mistralai/Mixtral-8x7B-Instruct-v0.1"
+    "mistralai/Mixtral-8x7B-Instruct-v0.1",
 )
 
+
 ############################################
 
 
 VERBOSE = True
-MAX_HISTORY = 100
-#MODEL = "gpt-3.5-turbo" # "gpt-4"
+MAX_HISTORY = 125
 
 
 def format_prompt(message, history):
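The hunk above ends at format_prompt, whose body is not part of this diff. For orientation, here is a minimal sketch of the instruction formatting commonly used with Mixtral-8x7B-Instruct; this is an assumption about the surrounding code, not the committed implementation:

```python
def format_prompt(message, history):
    # Common Mixtral-Instruct chat template: each past exchange is wrapped in
    # [INST] ... [/INST], followed by the assistant's reply and an end-of-sequence tag.
    prompt = "<s>"
    for user_turn, bot_turn in history:
        prompt += f"[INST] {user_turn} [/INST] {bot_turn}</s> "
    prompt += f"[INST] {message} [/INST]"
    return prompt
```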
@@ -59,7 +59,7 @@ def run_gpt(
         temperature=1.0,
         max_new_tokens=2096,
         top_p=0.99,
-        repetition_penalty=1.0,
+        repetition_penalty=1.7,
         do_sample=True,
         seed=seed,
     )
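The keyword arguments changed here (repetition_penalty raised from 1.0 to 1.7) match the sampling parameters accepted by huggingface_hub's InferenceClient.text_generation. A hedged sketch of how run_gpt might forward them, assuming the usual streaming pattern (the function body is outside this diff):

```python
from huggingface_hub import InferenceClient

client = InferenceClient("mistralai/Mixtral-8x7B-Instruct-v0.1")

# Hypothetical wrapper mirroring the kwargs shown in the hunk above.
generate_kwargs = dict(
    temperature=1.0,
    max_new_tokens=2096,
    top_p=0.99,
    repetition_penalty=1.7,  # value introduced by this commit
    do_sample=True,
    seed=42,                 # the app draws this from random.randint(...)
)

stream = client.text_generation(
    "prompt text here",
    stream=True,
    details=True,
    return_full_text=False,
    **generate_kwargs,
)
for chunk in stream:
    print(chunk.token.text, end="")
```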
@@ -91,7 +91,7 @@ def compress_history(purpose, task, history, directory):
     resp = run_gpt(
         COMPRESS_HISTORY_PROMPT,
         stop_tokens=["observation:", "task:", "action:", "thought:"],
-        max_tokens=512,
+        max_tokens=5096,
         purpose=purpose,
         task=task,
         history=history,
@@ -122,8 +122,8 @@ def call_search(purpose, task, history, directory, action_input):
 def call_main(purpose, task, history, directory, action_input):
     resp = run_gpt(
         ACTION_PROMPT,
-        stop_tokens=["observation:", "task:", "action:","thought:"],
-        max_tokens=2096,
+        stop_tokens=["observation:", "task:", "action:","though:"],
+        max_tokens=5096,
         purpose=purpose,
         task=task,
         history=history,
@@ -159,7 +159,7 @@ def call_set_task(purpose, task, history, directory, action_input):
     task = run_gpt(
         TASK_PROMPT,
         stop_tokens=[],
-        max_tokens=64,
+        max_tokens=2048,
         purpose=purpose,
         task=task,
         history=history,
@@ -259,7 +259,7 @@ agents =[
     "PYTHON_CODE_DEV"
 ]
 def generate(
-    prompt, history, agent_name=agents[0], sys_prompt="", temperature=0.9, max_new_tokens=256, top_p=0.95, repetition_penalty=1.0,
+    prompt, history, agent_name=agents[0], sys_prompt="", temperature=0.9, max_new_tokens=256, top_p=0.95, repetition_penalty=1.7,
 ):
     seed = random.randint(1,1111111111111111)
 
@@ -348,42 +348,22 @@ additional_inputs=[
 
 ]
 
-examples=[["What are the biggest news stories today?", None, None, None, None, None, ],
-          ["When is the next full moon?", None, None, None, None, None, ],
-          ["I'm planning a vacation to Japan. Can you suggest a one-week itinerary including must-visit places and local cuisines to try?", None, None, None, None, None, ],
-          ["Can you write a short story about a time-traveling detective who solves historical mysteries?", None, None, None, None, None,],
-          ["I'm trying to learn French. Can you provide some common phrases that would be useful for a beginner, along with their pronunciations?", None, None, None, None, None,],
-          ["I have chicken, rice, and bell peppers in my kitchen. Can you suggest an easy recipe I can make with these ingredients?", None, None, None, None, None,],
-          ["Can you explain how the QuickSort algorithm works and provide a Python implementation?", None, None, None, None, None,],
-          ["What are some unique features of Rust that make it stand out compared to other systems programming languages like C++?", None, None, None, None, None,],
+
+examples=[["Based on previous interactions, generate an interactive preview of the user's requested application.", None, None, None, None, None, ],
+          ["Utilize the relevant code snippets and components from previous interactions.", None, None, None, None, None, ],
+          ["Assemble a working demo that showcases the core functionality of the application.", None, None, None, None, None, ],
+          ["Present the demo in an interactive environment within the Gradio interface.", None, None, None, None, None,],
+          ["Allow the user to explore and interact with the demo to test its features.", None, None, None, None, None,],
+          ["Gather feedback from the user about the demo and potential improvements.", None, None, None, None, None,],
+          ["If the user approves of the app's running state you should provide a bash script that will automate all aspects of a local run and also a docker image for ease-of-launch in addition to the huggingface-ready app.py with all functions and gui and the requirements.txt file comprised of all required libraries and packages the application is dependent on, avoiding openai api at all points as we only use huggingface transformers, models, agents, libraries, api.", None, None, None, None, None,],
 ]
 
-'''
+
 gr.ChatInterface(
     fn=run,
-    chatbot=gr.Chatbot(show_label=False, show_share_button=False, show_copy_button=True, likeable=True, layout="panel"),
-    title="Mixtral 46.7B\nMicro-Agent\nInternet Search <br> development test",
+    title="""Fragmixt\nAgents With Agents,\nSurf With a Purpose""",
     examples=examples,
     concurrency_limit=20,
-with gr.Blocks() as ifacea:
-    gr.HTML("""TEST""")
-    ifacea.launch()
-).launch()
 with gr.Blocks() as iface:
-    #chatbot=gr.Chatbot(show_label=False, show_share_button=False, show_copy_button=True, likeable=True, layout="panel"),
-    chatbot=gr.Chatbot()
-    msg = gr.Textbox()
-    with gr.Row():
-        submit_b = gr.Button()
-        clear = gr.ClearButton([msg, chatbot])
-    submit_b.click(run, [msg,chatbot],[msg,chatbot])
-    msg.submit(run, [msg, chatbot], [msg, chatbot])
 iface.launch()
-'''
-gr.ChatInterface(
-    fn=run,
-    chatbot=gr.Chatbot(show_label=False, show_share_button=False, show_copy_button=True, super-intelligence=True, layout="panel"),
-    title="Mixtral 46.7B\nMicro-Agent\nInternet Search <br> development test",
-    examples=examples,
-    concurrency_limit=50,
-).launch(show_api=True)
+)
 
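The new tail of the file places a gr.ChatInterface call around a gr.Blocks context, so it does not appear to parse as written. For reference, a minimal self-contained sketch of the interface the added lines seem to be aiming for, with a placeholder run function standing in for the app's agent loop (an illustration, not the committed code):

```python
import gradio as gr

def run(message, history):
    # Placeholder for the app's real agent loop.
    return f"echo: {message}"

examples = [
    "Based on previous interactions, generate an interactive preview of the user's requested application.",
    "Assemble a working demo that showcases the core functionality of the application.",
]

gr.ChatInterface(
    fn=run,
    title="Fragmixt\nAgents With Agents,\nSurf With a Purpose",
    examples=examples,
    concurrency_limit=20,
).launch()
```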