He Bo committed on
Commit 9d2b618
1 Parent(s): e279c19
Files changed (1)
  1. app.py +13 -2
app.py CHANGED
@@ -11,6 +11,10 @@ api = invoke_url + '/langchain_processor_qa?query='
 chinese_index = "chinese_bge_test_0916"
 english_index = "smart_search_qa_demo_0618_en_2"
 
+
+bedrock_url = 'https://bx2kc13ys3.execute-api.us-east-1.amazonaws.com/prod/bedrock?'
+
+
 chinese_prompt = """给定一个长文档和一个问题的以下提取部分,如果你不知道答案,就说你不知道。不要试图编造答案。用中文回答。
 
 问题: {question}
@@ -184,7 +188,7 @@ def get_answer(task_type,question,session_id,language,prompt,search_engine,index
     return answer,confidence,source_str,url,request_time
 
 
-def get_summarize(texts,language,prompt):
+def get_summarize(texts,language,model_type,prompt):
 
     url = api + texts
     url += '&task=summarize'
@@ -205,6 +209,12 @@ def get_summarize(texts,language,prompt):
     # elif llm_instance == '8x':
     #     url += ('&llm_embedding_name=pytorch-inference-chatglm-v1-8x')
 
+
+    if model_type == "claude2":
+        url += ('&model_type=bedrock')
+        url += ('&bedrock_api_url='+bedrock_url)
+        url += ('&bedrock_model_id=anthropic.claude-v2')
+
     if len(prompt) > 0:
         url += ('&prompt='+prompt)
 
@@ -260,6 +270,7 @@ with demo:
         text_input = gr.Textbox(label="Input texts",lines=4)
        summarize_button = gr.Button("Summit")
        sm_language_radio = gr.Radio(["chinese", "english"],value="chinese",label="Language")
+        sm_model_type_radio = gr.Radio(["claude2","other"],value="other",label="Model type")
        # sm_llm_radio = gr.Radio(["2x", "8x"],value="2x",label="Chinese llm instance")
        sm_prompt_textbox = gr.Textbox(label="Prompt",lines=4, placeholder=EN_SUMMARIZE_PROMPT_TEMPLATE)
     with gr.Column():
@@ -267,7 +278,7 @@ with demo:
 
 
     qa_button.click(get_answer, inputs=[qa_task_radio,query_textbox,session_id_textbox,qa_language_radio,qa_prompt_textbox,qa_search_engine_radio,qa_index_textbox,qa_top_k_slider,score_type_checklist], outputs=qa_output)
-    summarize_button.click(get_summarize, inputs=[text_input,sm_language_radio,sm_prompt_textbox], outputs=text_output)
+    summarize_button.click(get_summarize, inputs=[text_input,sm_language_radio,sm_model_type_radio,sm_prompt_textbox], outputs=text_output)
 
 demo.launch()
 # demo.launch(share=True)
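
Note (not part of the commit): a minimal sketch of the request URL that get_summarize assembles once the model_type branch lands. The invoke_url value and the build_summarize_url helper are placeholders for illustration; only bedrock_url and the &model_type/&bedrock_api_url/&bedrock_model_id parameters come from this diff.

# Hypothetical stand-ins: app.py defines its own invoke_url earlier in the file,
# and build_summarize_url exists only in this sketch.
invoke_url = 'https://<api-id>.execute-api.<region>.amazonaws.com/prod'
api = invoke_url + '/langchain_processor_qa?query='
bedrock_url = 'https://bx2kc13ys3.execute-api.us-east-1.amazonaws.com/prod/bedrock?'

def build_summarize_url(texts, language, model_type, prompt=''):
    # Mirrors the query-string assembly in get_summarize after this commit
    # (language handling and the HTTP call itself are omitted here; real input
    # would normally be URL-encoded before being appended).
    url = api + texts
    url += '&task=summarize'
    if model_type == "claude2":
        # Route the call through the Bedrock proxy and pin the Claude 2 model id.
        url += '&model_type=bedrock'
        url += '&bedrock_api_url=' + bedrock_url
        url += '&bedrock_model_id=anthropic.claude-v2'
    if len(prompt) > 0:
        url += '&prompt=' + prompt
    return url

print(build_summarize_url('some long document text', 'english', 'claude2'))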
 
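And a runnable sketch of how the new "Model type" radio is threaded through to the handler on the Gradio side, assuming a stripped-down Blocks layout and a stubbed get_summarize (the real app.py keeps its existing layout and handler):

import gradio as gr

def get_summarize(texts, language, model_type, prompt):
    # Stub for illustration only; the real handler calls the summarize API shown above.
    backend = 'bedrock/anthropic.claude-v2' if model_type == 'claude2' else 'default LLM'
    return f'[{language}] summarize via {backend}: {texts[:40]}'

with gr.Blocks() as demo:
    text_input = gr.Textbox(label="Input texts", lines=4)
    sm_language_radio = gr.Radio(["chinese", "english"], value="chinese", label="Language")
    sm_model_type_radio = gr.Radio(["claude2", "other"], value="other", label="Model type")
    sm_prompt_textbox = gr.Textbox(label="Prompt", lines=4)
    summarize_button = gr.Button("Summarize")
    text_output = gr.Textbox(label="Output")
    # The radio value is passed positionally as the third input, matching the
    # updated get_summarize(texts, language, model_type, prompt) signature.
    summarize_button.click(
        get_summarize,
        inputs=[text_input, sm_language_radio, sm_model_type_radio, sm_prompt_textbox],
        outputs=text_output,
    )

demo.launch()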