# EU_AI_ACT / app.py
import os
import gradio as gr
from huggingface_hub import login
from transformers import AutoModelForSeq2SeqLM, T5Tokenizer
from peft import PeftModel, PeftConfig
# Hugging Face login (the access token is expected in the Space's "token" secret)
token = os.environ.get("token")
if token:
    login(token)
    print("Login successful")
else:
    print("No Hugging Face token found; continuing without authentication")
# Model and tokenizer setup: base FLAN-T5 plus the fine-tuned PEFT adapter
MODEL_NAME = "google/flan-t5-base"
tokenizer = T5Tokenizer.from_pretrained(MODEL_NAME, token=token)
config = PeftConfig.from_pretrained("Komal-patra/results")  # adapter config (base model, task type, ...)
base_model = AutoModelForSeq2SeqLM.from_pretrained(MODEL_NAME)
model = PeftModel.from_pretrained(base_model, "Komal-patra/results")
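
# Optional (assumption, not part of the original app): if the adapter is LoRA-style,
# PEFT's merge_and_unload() can fold it into the base weights and remove the adapter
# indirection at inference time, e.g.:
# model = model.merge_and_unload()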

# Text generation function: greedy decoding (num_beams=1) with a repetition penalty
def generate_text(prompt, max_length=150):
    inputs = tokenizer(prompt, return_tensors="pt")
    outputs = model.generate(
        input_ids=inputs["input_ids"],
        attention_mask=inputs["attention_mask"],
        max_length=max_length,
        num_beams=1,
        repetition_penalty=2.2,
    )
    generated_text = tokenizer.decode(outputs[0], skip_special_tokens=True)
    return generated_text
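
# Quick manual sanity check (hypothetical prompt; safe to remove):
# print(generate_text("What obligations does the EU AI Act place on providers of high-risk AI systems?"))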
# Custom CSS for the UI
custom_css = """
.message.pending {
background: #A8C4D6;
}
/* Response message */
.message.bot.svelte-1s78gfg.message-bubble-border {
border-color: #266B99;
display: flex;
align-items: center;
}
.message.bot.svelte-1s78gfg.message-bubble-border::before {
content: url('data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAK8AAACUCAMAAADS8YkpAAAAulBMVEX///8jHyDz8/MAAADt7e35+fn29vb8/PxAhq5cWluOjI0WEBJPlr1msNMdGBmC0O9LR0h5xuZFQkN+fX1bo8izsrIKAAC9vLyhoKAoJCXV1NUtcp/f3t5ubG15d3jm5uY+OzwxLS5TUFE5faeVlJRkYmPLysqqqakfa5o4MzXI6vmUtcufu8/f6e9hkrTD1OG1z9/Q4euEqcKZw9rq9vt4udiw2Ou25fhsn76a3PbX8PuNyeWJutSh0ec4kmjiAAAGHklEQVR4nO2YC1OdOBSAeWOIphTkFXkE5LHYqnV7ravd/v+/tScB7uWqXa8z3ersnG+mUwIJ+XI4eVw1DUEQBEEQBEEQBEEQBEEQBEEQBEEQBEEQBEEQBEF+Cbn11gavwzMH960dXkPL4uGtHV5Dy9jYv7XEixg8iYY0TYeIMJ0V7zuHqyjsGjFmjLG6znRdr6O3Vvo5dlIENaOM6TsY4W+t9TMMz6R07ToJs/KdGjtRWATZE2GdZp71G9c11z6woqFpFvcC+kSYUZI4/6njCrtv+4OiY3/6fCmrF08jrLOxfW6dsHLgFy8gRmh2xiEVnfOzq2v4v/KeE9bL/FF9nnoFAUpv+JUJDpN+OCgjLv84Ozv6osl5x+aZxlZLBev2rKzWjykJgSZmfntQRA4Utg7Lves/zs7Pj27gyvFkDjOv4mm9y2YWrISrksWeZTiAkYc6C7d9uIa1dGhbku1QHFXc2rycpMbUflVRvWKO/mfwPTo62sjbkMPTVpwXK2F9F8WBsnZbcFNmJvIiT9qwIL5ooQsjCTtfQgpZtNpiKvqdl8j3eEWiRNykbKspBm2Xq3yI5INqKP25vTc/T0viB/CCMJIv+HPy/SofWQ3VqexGc8JdStBmEc47ylbDzgktZM8BrQO/abrU1XhDa78oy7IgQefIgfuNLJZFQ2kIc1TERGkYIjanPTTK5Kit0pTd5J05qgak8X11iElG6hdh6pXFqDd88gXhU5kRWi83ZDXOvFlFOJy/Bdj4qyyzipjIvKc0gu9nOK5WNXGTGDbgGIb6ovK+KrsdlQJtHCiNHnbUUsbBbeMR4muFMTE0N6qzbXu1+vORtZYNV7bDixiG9GnyPf6qAhdSndUpRKBaZ0SQbH0b+xnfbFncEnNMfpaWvSngWWXW6nSd0kz3ZaSrIi6MxddOM1GtG9keC7bpGI0m177MvieX6hMLSGHWpZE3rpc3z976PhNftvX1YvJ4/bOdCZsrX61moRQImS8yWZcLmrqr+FIv4ZzD91IdOQUlhmsr3D4wh3l9ODo+/qaeN1KTsnr/RFFWz8W3euLbxcVeeODgB4k3QajyDeMOqrhk9Domy0msZvjkC/nO4rGBuVAUZQRvtQgdQ2+my8xUs6b5NgXYgTWKxjHEeH/fKK3FV6zjWz72beJyb9/Lu5r5hBRywRZM+SamTOCqbngbe648bTXVzlcOsCNC1NSkNUwbg1Cd7ICVxLk6U8LHxz9cmOq1IF6Y7UeXLafhvGPUWdvQR/lbxN06H+zUzAZYrafBTvmQCzrIAjEiSmytElTlx+Irv7FRVTlPCPW5ZkA+WHLuTsiP+2XWPYYlwnXUPceDA/GyyTEq0rl/UGPpTieq43Q/voMZrOeb007Bm1Jj8nVK0zO0IQ7dXsDc4mamWqx8Z3r5IVyP+Y820Zujs/PJ9/Zm11MUkkCnQN2sDsLcpyOfI+z0sHhVMoiZvvhyEYerAEN8xfaXYDT5uqmckyGMu+pAFdY3vvJ1nWV9dyOzgVf1Ataz5Z6ttoZPS3zXwhCPXv6cS6O980MC+0GbJH3fR56gpNf2fd1hpGSIoiSKhiGyYZWlXQSV+ySJimXlFXobBVmv2WFcwl4zZTzsF2q+talsDtXTUZc7l3x7AbckQ6ticX01CZ+enuwJL2w2qwJviRhFEIhadKkaipuQYndY6L2mHrO6HuuxcGB3LoOxCYLAF6IpB1XNaGHrajywTGCrDrrp2OukQQuZ2HcCmmZiHEcyqIE4SdhAqYZ/wlcd2p+PFt+T22+XT3Tv9o4oTgU/pIGEV3NiGHm1qmHkEPso6XkuM9e1ct4nUOzhxDwvhdvjs13BRWUv7eZbnE/tlweaVfGewxs4z6dbl3/NviB88v3Hxdpvc3f/8CTiLvDMh1g//5fiy7zU4PJqyl/QPfkIfP/2sNm4m83D3f2HDxev6+u3ABFe4quEP378sPAedaXwEt993/uLQ3+2/mYuv54+43v/8G7/Umlf//3Y9/5u83K7t+Py5sftye3tont/sXm3wZ1xNw8/vqsFAmTfdWwRBEEQBEEQBEEQBEEQBEEQBEEQBEEQBEEQBEEQ5H/NP43Akjj51w4UAAAAAElFTkSuQmCC');
margin-right: 10px;
}
/* User message */
.message.user.svelte-1s78gfg.message-bubble-border {
background: #9DDDF9;
border-color: #9DDDF9;
display: flex;
align-items: center;
}
.message.user.svelte-1s78gfg.message-bubble-border::before {
content: url('https://path_to_user_icon.png');
margin-right: 10px;
}
/* Text color for both user and bot messages */
span.md.svelte-8tpqd2.chatbot.prose p {
color: #266B99;
}
/* Chatbot container */
.gradio-container {
background: #1c1c1c; /* Dark background */
color: white; /* Light text color */
}
/* RED (Hex: #DB1616) for action buttons and links only */
.clear-btn {
background: #DB1616;
color: white;
}
/* Primary color (Hex: #266B99) for primary action buttons such as Submit */
.submit-btn {
background: #266B99;
color: white;
}
"""

# Gradio interface setup
with gr.Blocks(css=custom_css) as demo:
    with gr.Row():
        with gr.Column(scale=1):
            gr.Markdown("<h2>My chats</h2>")
            chat_topics = gr.Markdown("<!-- Dynamic content -->")
        with gr.Column(scale=3):
            gr.Markdown("<h1>Ask a question about the EU AI Act</h1>")
            chatbot = gr.Chatbot()
            msg = gr.Textbox(placeholder="Ask your question...", show_label=False)
            submit_button = gr.Button("Submit", elem_classes="submit-btn")
            clear = gr.Button("Clear", elem_classes="clear-btn")

    def user(user_message, history):
        # Append the new user message with an empty bot slot, and clear the textbox
        return "", history + [[user_message, None]]

    def bot(history):
        if len(history) == 1:  # First interaction: greet the user instead of querying the model
            history[-1][1] = "Hi there! How can I help you today?"
        else:
            previous_message = history[-1][0]  # Latest user message
            history[-1][1] = generate_text(previous_message)  # Model-generated answer
        return history

    submit_button.click(user, [msg, chatbot], [msg, chatbot], queue=False).then(
        bot, chatbot, chatbot
    )
    msg.submit(user, [msg, chatbot], [msg, chatbot], queue=False).then(
        bot, chatbot, chatbot
    )
    clear.click(lambda: None, None, chatbot, queue=False)

demo.launch()
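
# Note (assumption, not in the original launch call): enabling Gradio's request queue
# lets multiple users generate answers without blocking one another, e.g.:
# demo.queue().launch()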