import re
import secrets
from pathlib import Path

import gradio as gr
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer
from transformers.generation import GenerationConfig

# Initialize the model and tokenizer
model_name = "qwen/Qwen-VL-Chat"
tokenizer = AutoTokenizer.from_pretrained(model_name, trust_remote_code=True)
model = AutoModelForCausalLM.from_pretrained(model_name, trust_remote_code=True).eval()
model.generation_config = GenerationConfig.from_pretrained(model_name, trust_remote_code=True)

# Set device for the model
device = "cuda" if torch.cuda.is_available() else "cpu"
model.to(device)


def save_image(image_file, upload_dir: str) -> str:
    """Copy an uploaded image into upload_dir under a random name and return the new path."""
    Path(upload_dir).mkdir(parents=True, exist_ok=True)
    filename = secrets.token_hex(10) + Path(image_file.name).suffix
    file_path = Path(upload_dir) / filename
    # gr.File passes a temp-file wrapper; read from its .name path
    with open(image_file.name, "rb") as f_input, open(file_path, "wb") as f_output:
        f_output.write(f_input.read())
    return str(file_path)


def clean_response(response: str) -> str:
    """Strip Qwen-VL grounding markup (<ref>/<box>/<quad> tags), keeping only the referenced text."""
    response = re.sub(
        r"<ref>(.*?)</ref>(?:<box>.*?</box>)*(?:<quad>.*?</quad>)*",
        r"\1",
        response,
    ).strip()
    return response


def chat_with_model(image_path=None, text_query=None, history=None):
    # `history` is accepted for interface symmetry but unused in this single-turn demo
    query_elements = []
    if image_path:
        query_elements.append({'image': image_path})
    if text_query:
        query_elements.append({'text': text_query})
    query = tokenizer.from_list_format(query_elements)
    tokenized_inputs = tokenizer(query, return_tensors='pt').to(device)
    output = model.generate(**tokenized_inputs)
    # Drop the prompt tokens so only the newly generated answer is decoded
    generated_tokens = output[0][tokenized_inputs["input_ids"].shape[1]:]
    response = tokenizer.decode(generated_tokens, skip_special_tokens=True)
    return clean_response(response)


def process_input(text, file):
    image_path = None
    if file is not None:
        image_path = save_image(file, "uploaded_images")
    response = chat_with_model(image_path=image_path, text_query=text)
    # gr.Chatbot expects a list of (user, bot) message pairs
    return [(text, response)]


with gr.Blocks(theme="ParityError/Anime") as demo:
    gr.Markdown("""
# 🙋🏻‍♂️欢迎来到🌟Tonic 的🦆Qwen-VL-Chat🤩Bot!🚀
# 🙋🏻‍♂️Welcome to Tonic's Qwen-VL-Chat Bot!

该WebUI基于Qwen-VL-Chat，实现聊天机器人功能。但我必须解决它的很多问题，也许我也能获得一些荣誉。Qwen-VL-Chat 是一种多模态输入模型。
您可以使用此空间来测试当前模型 [qwen/Qwen-VL-Chat](https://huggingface.co/qwen/Qwen-VL-Chat)，也可以通过克隆这个空间来使用 🧑🏻‍🚀qwen/Qwen-VL-Chat🚀。
🧬🔬🔍 只需点击这里：[重复空间](https://huggingface.co/spaces/Tonic1/VLChat?duplicate=true)

加入我们：🌟TeamTonic🌟总是在制作很酷的演示！在 👻Discord 上加入我们活跃的构建者🛠️社区：[Discord](https://discord.gg/nXx5wbX9)。
在 🤗Huggingface 上：[TeamTonic](https://huggingface.co/TeamTonic) 和 [MultiTransformer](https://huggingface.co/MultiTransformer)。
在 🌐Github 上：[Polytonic](https://github.com/tonic-ai)，并为 🌟 [PolyGPT](https://github.com/tonic-ai/polygpt-alpha) 做出贡献。

This WebUI is based on Qwen-VL-Chat, implementing chatbot functionalities. Qwen-VL-Chat is a multimodal input model.
You can use this Space to test out the current model [qwen/Qwen-VL-Chat](https://huggingface.co/qwen/Qwen-VL-Chat).
You can also use qwen/Qwen-VL-Chat🚀 by cloning this space. Simply click here: [Duplicate Space](https://huggingface.co/spaces/Tonic1/VLChat?duplicate=true)

Join us: TeamTonic is always making cool demos!
Join our active builder's community on Discord: [Discord](https://discord.gg/nXx5wbX9).
On Huggingface: [TeamTonic](https://huggingface.co/TeamTonic) & [MultiTransformer](https://huggingface.co/MultiTransformer).
On Github: [Polytonic](https://github.com/tonic-ai) & contribute to [PolyGPT](https://github.com/tonic-ai/polygpt-alpha).
""")

    with gr.Row():
        with gr.Column(scale=1):
            chatbot = gr.Chatbot(label='Qwen-VL-Chat')
        with gr.Column(scale=1):
            with gr.Row():
                query = gr.Textbox(lines=2, label='Input', placeholder="Type your message here...")
                file_upload = gr.File(label="Upload Image")
            submit_btn = gr.Button("Submit")

    submit_btn.click(
        fn=process_input,
        inputs=[query, file_upload],
        outputs=chatbot
    )

    gr.Markdown("""
注意：此演示受 Qwen-VL 原始许可证的约束。我们强烈建议用户不要故意生成或允许他人故意生成有害内容，包括仇恨言论、暴力、色情、欺骗等。
（注：本演示受 Qwen-VL 许可协议约束，强烈建议用户不要传播或允许他人传播以下内容，包括但不限于仇恨言论、暴力、色情、欺诈相关的有害信息。）

Note: This demo is governed by the original license of Qwen-VL. We strongly advise users not to knowingly generate or allow others to knowingly generate harmful content, including hate speech, violence, pornography, deception, etc.
(Note: This demo is subject to the license agreement of Qwen-VL. We strongly advise users not to disseminate or allow others to disseminate the following content, including but not limited to hate speech, violence, pornography, and fraud-related harmful information.)
""")

demo.queue().launch()
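# ---------------------------------------------------------------------------
# Reference-only sketch, not wired into the demo above: chat_with_model()
# accepts a `history` argument but ignores it, so the UI is single-turn.
# A multi-turn variant could use the model.chat() helper that Qwen-VL-Chat's
# remote code provides (per the upstream model card it returns a
# (response, history) pair); the helper name below is illustrative.
# ---------------------------------------------------------------------------
def chat_with_history(image_path=None, text_query=None, history=None):
    """Sketch of a multi-turn variant of chat_with_model using model.chat()."""
    query_elements = []
    if image_path:
        query_elements.append({'image': image_path})
    if text_query:
        query_elements.append({'text': text_query})
    query = tokenizer.from_list_format(query_elements)
    # `history` is a list of (query, response) pairs from earlier turns
    response, history = model.chat(tokenizer, query=query, history=history)
    return clean_response(response), history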