|
import gradio as gr
from transformers import AutoModelForCausalLM, AutoTokenizer
|
|
|
|
|
# Hugging Face repo holding GGUF quantizations of Mistral FT Optimized 1227.
model_name = "TheBloke/mistral-ft-optimized-1227-GGUF"

# Bug fix: this repo contains only .gguf weight files, so a plain
# from_pretrained(repo) cannot resolve a checkpoint — transformers needs the
# specific GGUF file named via gguf_file= (supported since transformers 4.41).
# NOTE(review): pick of the Q4_K_M quantization is an assumption — confirm the
# exact filename against the repo's file listing.
gguf_file = "mistral-ft-optimized-1227.Q4_K_M.gguf"

model = AutoModelForCausalLM.from_pretrained(model_name, gguf_file=gguf_file)
|
|
|
|
|
def chat_with_model(input_text):
    """Generate a chat response for *input_text* with the loaded model.

    Parameters
    ----------
    input_text : str
        The user's prompt.

    Returns
    -------
    str
        The generated text, decoded from the model's output token ids.
    """
    # Bug fix: model.generate() expects token-id tensors, not raw UTF-8
    # bytes, and its output is a tensor of token ids — calling
    # bytes.decode('utf-8') on it could never produce text.  A tokenizer is
    # required on both ends.
    #
    # Lazily load and cache the tokenizer on the function object so it is
    # fetched only once, not on every chat turn.
    tokenizer = getattr(chat_with_model, "_tokenizer", None)
    if tokenizer is None:
        # NOTE(review): for a GGUF-only repo the tokenizer may also need a
        # gguf_file= argument — confirm against the repo contents.
        tokenizer = AutoTokenizer.from_pretrained(model_name)
        chat_with_model._tokenizer = tokenizer

    inputs = tokenizer(input_text, return_tensors="pt")
    outputs = model.generate(**inputs, max_length=100, num_return_sequences=1)

    # skip_special_tokens strips EOS/BOS markers from the visible reply.
    response = tokenizer.decode(outputs[0], skip_special_tokens=True)
    return response
|
|
|
|
|
# Build a minimal text-in / text-out Gradio UI around the chat function.
# Bug fix: the Spanish UI strings were mojibake (UTF-8 text mis-decoded as a
# CJK codepage, e.g. "b谩sico" for "básico"); restored to proper UTF-8.
interface = gr.Interface(
    fn=chat_with_model,
    inputs="text",
    outputs="text",
    title="Chatbot con Mistral FT Optimized",
    description="Un chatbot básico utilizando el modelo Mistral FT Optimized 1227 en Hugging Face.",
    examples=["Hola, ¿cómo estás?", "Cuéntame un chiste."],
)

# Start the local Gradio server (blocks until interrupted).
interface.launch()
|
|