umuthopeyildirim committed on
Commit
9a16917
1 Parent(s): 33ef098

Update app.py

Files changed (1)
  1. app.py +35 -1
app.py CHANGED
@@ -79,6 +79,40 @@ def predict(message, history):
             yield partial_message
 
 
+def predict2(message, history):
+
+    history_transformer_format = history + [[message, ""]]
+    stop = StopOnTokens()
+
+    messages = "".join(["".join(["\nuser :"+item[0], "\nbot:"+item[1]])  # curr_system_message +
+                        for item in history_transformer_format])
+
+    print(messages)
+
+    model_inputs = tokenizer([messages], return_tensors="pt")
+    streamer = TextIteratorStreamer(
+        tokenizer, timeout=10., skip_prompt=True, skip_special_tokens=True)
+    generate_kwargs = dict(
+        model_inputs,
+        streamer=streamer,
+        max_new_tokens=1024,
+        do_sample=True,
+        top_p=0.95,
+        top_k=1000,
+        temperature=0.5,
+        num_beams=1,
+        stopping_criteria=StoppingCriteriaList([stop])
+    )
+    t = Thread(target=model.generate, kwargs=generate_kwargs)
+    t.start()
+
+    partial_message = ""
+    for new_token in streamer:
+        if new_token != '<':
+            partial_message += new_token
+            yield partial_message
+
+
 def generate_text(prompt, tokenizer, model):
     # Tokenize the input
     input_ids = tokenizer.encode(prompt, return_tensors="pt")
@@ -127,7 +161,7 @@ with gr.Blocks(css=css) as demo:
     # with gr.Tab("E-Commerce"):
     #     e_commerce_interface()
     with gr.Tab("OpenBB"):
-        gr.ChatInterface(predict, examples=[
+        gr.ChatInterface(predict2, examples=[
            examples_openbb[0], examples_openbb[1], examples_openbb[2], examples_openbb[3]])
 
 
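
For context on what the new predict2 does: it streams tokens out of model.generate by running generation on a background thread and reading decoded text from a TextIteratorStreamer, with a StopOnTokens stopping criterion (defined elsewhere in app.py and not shown in this diff). The sketch below is a minimal, self-contained version of that pattern, not the app's actual code: the "gpt2" checkpoint and the eos-only stop-id list are placeholders, StopOnTokens is reconstructed as a typical StoppingCriteria subclass, and predict_stream stands in for predict2.

# Minimal sketch of the streaming-chat pattern (assumptions noted above).
from threading import Thread

import gradio as gr
from transformers import (AutoModelForCausalLM, AutoTokenizer, StoppingCriteria,
                          StoppingCriteriaList, TextIteratorStreamer)

tokenizer = AutoTokenizer.from_pretrained("gpt2")      # placeholder checkpoint
model = AutoModelForCausalLM.from_pretrained("gpt2")   # placeholder checkpoint


class StopOnTokens(StoppingCriteria):
    """Stop generation once the last generated token is one of the stop ids."""

    def __call__(self, input_ids, scores, **kwargs):
        stop_ids = [tokenizer.eos_token_id]            # placeholder stop ids
        return input_ids[0][-1].item() in stop_ids


def predict_stream(message, history):
    # Flatten the chat history into the same "\nuser :... / \nbot:..." prompt
    # format used by predict2 above.
    history = history + [[message, ""]]
    prompt = "".join("\nuser :" + user + "\nbot:" + bot for user, bot in history)

    model_inputs = tokenizer([prompt], return_tensors="pt")
    streamer = TextIteratorStreamer(
        tokenizer, timeout=10., skip_prompt=True, skip_special_tokens=True)
    generate_kwargs = dict(
        model_inputs,                                  # input_ids + attention_mask
        streamer=streamer,
        max_new_tokens=1024,
        do_sample=True,
        top_p=0.95,
        temperature=0.5,
        stopping_criteria=StoppingCriteriaList([StopOnTokens()]),
    )

    # generate() blocks until it finishes, so it runs on its own thread while
    # the streamer yields decoded text back to the UI as tokens are produced.
    Thread(target=model.generate, kwargs=generate_kwargs).start()

    partial_message = ""
    for new_token in streamer:
        if new_token != '<':   # same filter as the diff: drop bare '<' tokens
            partial_message += new_token
            yield partial_message


with gr.Blocks() as demo:
    with gr.Tab("OpenBB"):
        gr.ChatInterface(predict_stream)

demo.queue().launch()

Each yield re-renders the partial reply in gr.ChatInterface, which is what produces the token-by-token typing effect; enabling the queue (demo.queue()) is typically required for Gradio to stream generator output.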