yeecin committed
Commit
86fce4a
1 Parent(s): a4a4fed

Upload app.py

Files changed (1): app.py +2 -2
app.py CHANGED
@@ -46,7 +46,7 @@ import gradio as gr
 import torch
 import os
 from transformers import BlipForConditionalGeneration, BlipProcessor, GenerationConfig
-
+print(torch.__version__)
 device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
 
 _MODEL_PATH = 'IDEA-CCNL/Taiyi-BLIP-750M-Chinese'
@@ -76,7 +76,7 @@ def inference(raw_image, model_n, strategy):
 # Nucleus sampling, i.e. top-p sampling: keep only the tokens whose cumulative probability exceeds p, renormalize to get a new distribution, then sample from it, so the sampled results are more diverse
 config = GenerationConfig(
     do_sample=True,
-    top_p=0.9,
+    top_p=0.8,
     max_length=50,
     min_length=5,
 )
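
The changed GenerationConfig appears here only as two diff lines. As a rough sketch of how such a config is typically wired into BLIP caption generation with this model (the image path, the generate call, and the decoding step are assumptions for illustration, not taken from app.py):

import torch
from PIL import Image
from transformers import BlipForConditionalGeneration, BlipProcessor, GenerationConfig

_MODEL_PATH = 'IDEA-CCNL/Taiyi-BLIP-750M-Chinese'
device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')

# Load the Chinese BLIP captioning model and its processor.
processor = BlipProcessor.from_pretrained(_MODEL_PATH)
model = BlipForConditionalGeneration.from_pretrained(_MODEL_PATH).to(device)

# Nucleus (top-p) sampling: sample only from the smallest set of tokens whose
# cumulative probability reaches top_p. Lowering it from 0.9 to 0.8 narrows
# the candidate pool, trading some diversity for more conservative captions.
config = GenerationConfig(
    do_sample=True,
    top_p=0.8,
    max_length=50,
    min_length=5,
)

# 'example.jpg' is a placeholder input image, not a file from the repo.
image = Image.open('example.jpg').convert('RGB')
inputs = processor(images=image, return_tensors='pt').to(device)
output_ids = model.generate(**inputs, generation_config=config)
caption = processor.decode(output_ids[0], skip_special_tokens=True)
print(caption)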