import gradio as gr
import os

from huggingface_hub import login
from transformers import AutoTokenizer, AutoModelForCausalLM
from transformers import pipeline  # only used by the commented-out experiments below
from peft import PeftModel, PeftConfig

# Log in with the Hugging Face access token (assuming it is stored securely as a Space secret)
token = os.environ.get("token")
login(token)

model_name = "Ikeofai/gemma-2b-for-python-v2"
max_length = 200

# Load the tokenizer, the PEFT adapter config, the base model, and the adapter weights on top
tokenizer = AutoTokenizer.from_pretrained(model_name)
config = PeftConfig.from_pretrained(model_name, token=token)
model = AutoModelForCausalLM.from_pretrained(config.base_model_name_or_path)
model = PeftModel.from_pretrained(model, model_name)
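# The model is only used for inference here, so put it in eval mode; merging the
# adapter into the base weights (model.merge_and_unload()) is an optional extra
# step that can speed up generation slightly.
model.eval()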
# testing
# tokenizer = AutoTokenizer.from_pretrained("Orcawise/eu-ai-act-align", use_fast=True, max_length=200)
# pipe = pipeline("text2text-generation", model=model, tokenizer=tokenizer)
# pipe = pipeline("conversational", model="google/vit-base-patch16-224")
# gr.Interface.from_pipeline(pipe).launch()
def generate_text(prompt):
    """Generates text with the PEFT model.

    Args:
        prompt (str): The user-provided prompt to start the generation.

    Returns:
        str: The generated text.
    """
    # Tokenize the prompt
    input_ids = tokenizer(prompt, return_tensors="pt")["input_ids"]

    # Generate text (num_beams=1 is greedy decoding; raise it for beam search at the cost of speed)
    output = model.generate(
        input_ids=input_ids,
        max_length=max_length,
        num_beams=1,
    )

    # Decode the generated tokens
    generated_text = tokenizer.batch_decode(output, skip_special_tokens=True)[0]
    return generated_text
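# Quick sanity check (hypothetical prompt; safe to remove):
# print(generate_text("Write a Python function that reverses a list"))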
#############
### Gradio chat UI; the bot greets the user when an empty message is submitted
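# If the greeting should appear as soon as the page loads (rather than only after an
# empty submit), one option is to seed the Chatbot with an initial bot turn, e.g.:
#     chatbot = gr.Chatbot(value=[[None, "Hi there! How can I help you today?"]])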
with gr.Blocks() as demo:
    chatbot = gr.Chatbot()
    msg = gr.Textbox(placeholder="Ask your question...")  # placeholder text for the input box
    submit_button = gr.Button("Submit")
    clear = gr.Button("Clear")

    def user(user_message, history):
        # Append the user's message to the history and clear the textbox
        return "", history + [[user_message, None]]

    def bot(history):
        history[-1][1] = ""  # start the bot's reply for the latest turn
        user_message = history[-1][0]
        if not user_message:  # no user input, so greet instead of generating
            bot_message = "Hi there! How can I help you today?"
        else:
            bot_message = generate_text(user_message)  # respond to the user's message
        for character in bot_message:
            history[-1][1] += character
            yield history  # stream the reply character by character
    # Connect the submit button to the user and bot functions
    submit_button.click(user, [msg, chatbot], [msg, chatbot], queue=False).then(
        bot, chatbot, chatbot
    )
    # Trigger the same chain when Enter is pressed in the textbox
    msg.submit(user, [msg, chatbot], [msg, chatbot], queue=False).then(
        bot, chatbot, chatbot
    )
    clear.click(lambda: None, None, chatbot, queue=False)
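# Note: bot() streams its reply with yield; older Gradio releases need the queue
# enabled for generator outputs (e.g. demo.queue().launch()), while recent versions
# enable it by default.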
demo.launch()