He Bo committed on
Commit
4335878
1 Parent(s): 50ee34c
Files changed (1) hide show
  1. app.py +8 -7
app.py CHANGED
@@ -149,7 +149,7 @@ def get_answer(question,session_id,language,prompt,index,top_k,temperature):
149
  return answer,confidence,source_str,url,request_time
150
 
151
 
152
- def get_summarize(texts,language,llm_instance,prompt):
153
 
154
  url = api + texts
155
  url += '&task=summarize'
@@ -164,10 +164,11 @@ def get_summarize(texts,language,llm_instance,prompt):
164
  url += '&language=chinese'
165
  url += ('&embedding_endpoint_name=huggingface-inference-text2vec-base-chinese-v1')
166
  # url += ('&prompt='+zh_prompt_template)
167
- if llm_instance == '2x':
168
- url += ('&llm_embedding_name=pytorch-inference-chatglm-v1')
169
- elif llm_instance == '8x':
170
- url += ('&llm_embedding_name=pytorch-inference-chatglm-v1-8x')
 
171
 
172
  if len(prompt) > 0:
173
  url += ('&prompt='+prompt)
@@ -220,14 +221,14 @@ with demo:
220
  text_input = gr.Textbox(label="Input texts",lines=4)
221
  summarize_button = gr.Button("Summit")
222
  sm_language_radio = gr.Radio(["chinese", "english"],value="chinese",label="Language")
223
- sm_llm_radio = gr.Radio(["2x", "8x"],value="2x",label="Chinese llm instance")
224
  sm_prompt_textbox = gr.Textbox(label="Prompt",lines=4, placeholder=EN_SUMMARIZE_PROMPT_TEMPLATE)
225
  with gr.Column():
226
  text_output = gr.Textbox()
227
 
228
 
229
  qa_button.click(get_answer, inputs=[query_textbox,session_id_textbox,qa_language_radio,qa_prompt_textbox,qa_index_textbox,qa_top_k_slider,temperature_slider], outputs=qa_output)
230
- summarize_button.click(get_summarize, inputs=[text_input,sm_language_radio,sm_llm_radio,sm_prompt_textbox], outputs=text_output)
231
 
232
  demo.launch()
233
  # smart_qa.launch(share=True)
 
149
  return answer,confidence,source_str,url,request_time
150
 
151
 
152
+ def get_summarize(texts,language,prompt):
153
 
154
  url = api + texts
155
  url += '&task=summarize'
 
164
  url += '&language=chinese'
165
  url += ('&embedding_endpoint_name=huggingface-inference-text2vec-base-chinese-v1')
166
  # url += ('&prompt='+zh_prompt_template)
167
+ url += ('&llm_embedding_name=pytorch-inference-chatglm-v1')
168
+ # if llm_instance == '2x':
169
+ # url += ('&llm_embedding_name=pytorch-inference-chatglm-v1')
170
+ # elif llm_instance == '8x':
171
+ # url += ('&llm_embedding_name=pytorch-inference-chatglm-v1-8x')
172
 
173
  if len(prompt) > 0:
174
  url += ('&prompt='+prompt)
 
221
  text_input = gr.Textbox(label="Input texts",lines=4)
222
  summarize_button = gr.Button("Summit")
223
  sm_language_radio = gr.Radio(["chinese", "english"],value="chinese",label="Language")
224
+ # sm_llm_radio = gr.Radio(["2x", "8x"],value="2x",label="Chinese llm instance")
225
  sm_prompt_textbox = gr.Textbox(label="Prompt",lines=4, placeholder=EN_SUMMARIZE_PROMPT_TEMPLATE)
226
  with gr.Column():
227
  text_output = gr.Textbox()
228
 
229
 
230
  qa_button.click(get_answer, inputs=[query_textbox,session_id_textbox,qa_language_radio,qa_prompt_textbox,qa_index_textbox,qa_top_k_slider,temperature_slider], outputs=qa_output)
231
+ summarize_button.click(get_summarize, inputs=[text_input,sm_language_radio,sm_prompt_textbox], outputs=text_output)
232
 
233
  demo.launch()
234
  # smart_qa.launch(share=True)