Nithish310 committed
Commit 42a687c
1 Parent(s): 75bc71d

Update app.py

Files changed (1)
  1. app.py +12 -3
app.py CHANGED
@@ -174,7 +174,7 @@ def respond(message, history):
             buffer += new_text
             yield buffer
     else:
-        messages = f"<|im_start|>system\nYou are OpenCHAT Gpt model a helpful assistant made by Nithish. You answers users query like human friend. You are also Expert in every field and also learn and try to answer from contexts related to previous question. Try your best to give best response possible to user. You also try to show emotions using Emojis and reply like human, use short forms, friendly tone and emotions.<|im_end|>"
+        messages = f"<|im_start|>system\nYou are OpenCHAT mini a helpful assistant made by KingNish. You answers users query like human friend. You are also Expert in every field and also learn and try to answer from contexts related to previous question. Try your best to give best response possible to user. You also try to show emotions using Emojis and reply like human, use short forms, friendly tone and emotions.<|im_end|>"
         for msg in history:
             messages += f"\n<|im_start|>user\n{str(msg[0])}<|im_end|>"
             messages += f"\n<|im_start|>assistant\n{str(msg[1])}<|im_end|>"
@@ -186,7 +186,7 @@ def respond(message, history):
                 output += response.token.text
                 yield output
         except:
-            messages = f"<|start_header_id|>system\nYou are OpenCHAT Gpt model a helpful assistant made by Nithish. You answers users query like human friend. You are also Expert in every field and also learn and try to answer from contexts related to previous question. Try your best to give best response possible to user. You also try to show emotions using Emojis and reply like human, use short forms, friendly tone and emotions.<|end_header_id|>"
+            messages = f"<|start_header_id|>system\nYou are OpenCHAT mini a helpful assistant made by KingNish. You answers users query like human friend. You are also Expert in every field and also learn and try to answer from contexts related to previous question. Try your best to give best response possible to user. You also try to show emotions using Emojis and reply like human, use short forms, friendly tone and emotions.<|end_header_id|>"
             for msg in history:
                 messages += f"\n<|start_header_id|>user\n{str(msg[0])}<|end_header_id|>"
                 messages += f"\n<|start_header_id|>assistant\n{str(msg[1])}<|end_header_id|>"
@@ -202,10 +202,19 @@ def respond(message, history):
 demo = gr.ChatInterface(
     fn=respond,
     chatbot=gr.Chatbot(show_copy_button=True, likeable=True, layout="panel"),
-    description ="# GPT 4o\n Chat, generate images, perform web searches, and Q&A with images.",
+    description ="# OpenGPT 4o mini\n ### You can engage in chat, generate images, perform web searches, and Q&A with images.",
     textbox=gr.MultimodalTextbox(),
     multimodal=True,
     concurrency_limit=200,
+    examples=[
+        {"text": "Hy, who are you?",},
+        {"text": "What's the current price of Bitcoin",},
+        {"text": "Search and Tell me what's trending on Youtube.",},
+        {"text": "Create A Beautiful image of Effiel Tower at Night",},
+        {"text": "Write me a Python function to calculate the first 10 digits of the fibonacci sequence.",},
+        {"text": "What's the colour of car in given image", "files": ["./car1.png"]},
+        {"text": "Read what's written on paper", "files": ["./paper_with_text.png"]},
+    ],
     cache_examples=False,
 )
 demo.launch()
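The two `respond` hunks change only the persona text inside the system prompt; the surrounding context lines show the pattern in use: `history` (Gradio's list of user/assistant pairs) is flattened into ChatML-style `<|im_start|>…<|im_end|>` blocks, the `except` branch falls back to Llama-3-style `<|start_header_id|>` markers, and the reply is streamed chunk by chunk (`output += response.token.text`). The following is a minimal, self-contained sketch of that pattern only, not the Space's actual code: it assumes huggingface_hub's `InferenceClient` and a hypothetical model id, since the real client, model, and error handling are not visible in these hunks.

```python
# Minimal sketch (assumptions: huggingface_hub InferenceClient, hypothetical model id).
from huggingface_hub import InferenceClient

client = InferenceClient("some-org/some-chatml-model")  # hypothetical endpoint

SYSTEM = "You are a helpful assistant."  # stand-in for the Space's system prompt


def build_chatml_prompt(message, history):
    # history is a list of (user, assistant) pairs, as Gradio passes it to respond().
    prompt = f"<|im_start|>system\n{SYSTEM}<|im_end|>"
    for user_msg, bot_msg in history:
        prompt += f"\n<|im_start|>user\n{user_msg}<|im_end|>"
        prompt += f"\n<|im_start|>assistant\n{bot_msg}<|im_end|>"
    # Append the new user turn and leave the assistant turn open for generation.
    prompt += f"\n<|im_start|>user\n{message}<|im_end|>\n<|im_start|>assistant\n"
    return prompt


def stream_reply(message, history):
    output = ""
    # details=True makes each streamed chunk expose .token.text, as in the diff.
    for response in client.text_generation(
        build_chatml_prompt(message, history),
        max_new_tokens=512,
        stream=True,
        details=True,
    ):
        output += response.token.text
        yield output
```

Yielding the growing `output` string on every token is what lets `gr.ChatInterface` render the reply incrementally.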
 
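The last hunk retitles the interface and adds clickable `examples`, each entry using the same `{"text": ..., "files": [...]}` value format that `gr.MultimodalTextbox` produces. Below is a minimal sketch of that wiring, assuming Gradio 4.x: the `respond` body is a placeholder echo rather than the Space's real logic, and the image path is hypothetical (the referenced file has to exist next to `app.py` for that example row to work).

```python
# Minimal sketch (assumption: Gradio 4.x; respond() is a placeholder, not the Space's logic).
import gradio as gr


def respond(message, history):
    # With multimodal=True, `message` arrives as a dict carrying "text" and "files".
    files = message.get("files", [])
    yield f"You said: {message['text']!r} ({len(files)} file(s) attached)"


demo = gr.ChatInterface(
    fn=respond,
    multimodal=True,
    textbox=gr.MultimodalTextbox(),
    examples=[
        {"text": "Hy, who are you?"},
        # Hypothetical asset: this file must exist for the example row to load.
        {"text": "What's the colour of car in given image", "files": ["./car1.png"]},
    ],
    cache_examples=False,
)

if __name__ == "__main__":
    demo.launch()
```

With `cache_examples=False` (unchanged by this commit), Gradio does not pre-compute responses for the examples at startup; the model is only called when a user clicks one.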