YaphetYan committed on
Commit
695df9a
1 Parent(s): 580cc25

test: 打出来类型 一个个看看

Browse files
Files changed (1) hide show
  1. app.py +3 -2
app.py CHANGED
@@ -2,9 +2,9 @@ import torch
2
  import gradio as gr
3
  from transformers import AutoModel, pipeline, AutoTokenizer
4
 
5
- # from issue: https://discuss.huggingface.co/t/how-to-install-flash-attention-on-hf-gradio-space/70698/2
6
  import subprocess
7
 
 
8
  # InternVL2 需要的 flash_attn 这个依赖只能这样运行时装
9
  subprocess.run(
10
  "pip install flash-attn --no-build-isolation",
@@ -35,6 +35,7 @@ except Exception as error:
35
 
36
  def predict(input_img, questions):
37
  try:
 
38
  predictions = inference(question=questions, image=input_img)
39
  return str(predictions)
40
  except Exception as e:
@@ -51,7 +52,7 @@ gradio_app = gr.Interface(
51
  "text",
52
  ],
53
  outputs="text",
54
- title="Plz ask my anything",
55
  )
56
 
57
  if __name__ == "__main__":
 
2
  import gradio as gr
3
  from transformers import AutoModel, pipeline, AutoTokenizer
4
 
 
5
  import subprocess
6
 
7
+ # from issue: https://discuss.huggingface.co/t/how-to-install-flash-attention-on-hf-gradio-space/70698/2
8
  # InternVL2 需要的 flash_attn 这个依赖只能这样运行时装
9
  subprocess.run(
10
  "pip install flash-attn --no-build-isolation",
 
35
 
36
  def predict(input_img, questions):
37
  try:
38
+ gr.Info(str(type(inference)))
39
  predictions = inference(question=questions, image=input_img)
40
  return str(predictions)
41
  except Exception as e:
 
52
  "text",
53
  ],
54
  outputs="text",
55
+ title=str(type(inference)),
56
  )
57
 
58
  if __name__ == "__main__":