# Requires: pip install gradio llama-cpp-python huggingface-hub
import gradio as gr
from llama_cpp import Llama

# Load the model (downloads the GGUF weights from the Hugging Face Hub on first run)
llm = Llama.from_pretrained(
    repo_id="uonlp/Vistral-7B-Chat-gguf",
    filename="ggml-vistral-7B-chat-f16.gguf"
)

# Define the function to interact with the model
def chat_with_model(user_input):
    response = llm.create_chat_completion(
        messages=[{"role": "user", "content": user_input}]
    )
    return response['choices'][0]['message']['content']

# Define CSS for a chat-like appearance
custom_css = """
body {
    background-color: #f0f9ff;
    font-family: 'Arial', sans-serif;
}
.gradio-container {
    border: 2px solid #b3e0ff;
    border-radius: 15px;
    padding: 30px;
    background-color: #ffffff;
    box-shadow: 0px 4px 8px rgba(0, 0, 0, 0.1);
    max-width: 600px;
    margin: auto;
}
h1 {
    color: #4a90e2;
    text-align: center;
    font-size: 28px;
}
.input-textbox, .output-textbox {
    display: block;
    padding: 15px;
    border-radius: 20px;
    margin: 10px 0;
    width: 100%;
    font-size: 16px;
}
.input-textbox {
    background-color: #e0f7fa;
    border: 2px solid #b3e0ff;
    color: #333;
    text-align: left;
}
.output-textbox {
    background-color: #e6eeff;
    border: 2px solid #4a90e2;
    color: #333;
    text-align: left;
}
.gr-button {
    background-color: #4da6ff;
    border: none;
    border-radius: 10px;
    color: white;
    padding: 15px 25px;
    font-size: 18px;
    cursor: pointer;
    margin-top: 20px;
    width: 100%;
}
.gr-button:hover {
    background-color: #3399ff;
}
.gradio-container:before {
    /* Greeting banner: "Hello, how can I help you?" */
    content: "💬 Xin chào, tôi có thể giúp gì cho bạn?";
    display: block;
    text-align: center;
    font-size: 24px;
    color: #2c6693;
    margin-bottom: 20px;
}
"""

# Create the Gradio interface
iface = gr.Interface(
    fn=chat_with_model,
    inputs=gr.Textbox(label="You", placeholder="Hãy hỏi ở đây..."),  # "Ask here..."
    outputs=gr.Textbox(label="Assistant"),
    title="Friendly Medical Chatbot",
    description="Bạn có thể hỏi mọi câu hỏi liên quan đến y tế",  # "You can ask any health-related question"
    theme="default",
    css=custom_css
)

# Launch the interface
if __name__ == "__main__":
    iface.launch()