# Hugging Face Spaces app: Llama-3 chatbot with an institution-ranking demo.
import transformers
import torch
import gradio as gr
import os

# Retrieve the Hugging Face API token from the environment (set via Space secrets).
hf_token = os.getenv("HF_TOKEN")

# Fail fast: the gated Llama-3 checkpoint cannot be downloaded without a token.
if not hf_token:
    raise ValueError("Hugging Face token not found. Please add it to the secrets in Hugging Face Spaces.")

# Load the chatbot model. `token=` is the current kwarg; `use_auth_token=` is
# deprecated and removed in recent transformers releases.
model_id = "meta-llama/Meta-Llama-3-8B-Instruct"
pipeline = transformers.pipeline(
    "text-generation",
    model=model_id,
    model_kwargs={"torch_dtype": torch.bfloat16},  # bf16 halves memory vs fp32
    device_map="auto",  # spread weights across available devices
    token=hf_token,  # needed for gated/private models
)
# Function to calculate scores and rankings
def calculate_ranking(data):
    """Compute total scores and rank institutions in descending order.

    Args:
        data: list of dicts, each with numeric "TLR", "GO", "OI" and "PR"
            keys. Each dict is mutated in place: "Total" and "Rank" are added.

    Returns:
        A new list containing the same dicts, sorted by "Total" (highest first).
    """
    for institution in data:
        institution["Total"] = sum(institution[k] for k in ("TLR", "GO", "OI", "PR"))
    # Highest total comes first; ranks start at 1.
    ranked_data = sorted(data, key=lambda x: x["Total"], reverse=True)
    for rank, institution in enumerate(ranked_data, start=1):
        institution["Rank"] = rank
    return ranked_data
# Chatbot function with ranking logic
def chatbot_response(user_message):
    """Return a reply for *user_message*.

    Messages containing "rank" (case-insensitive) get a canned institution
    ranking built from example data; everything else is forwarded to the
    Llama-3 text-generation pipeline.
    """
    if "rank" in user_message.lower():
        # Example data for ranking
        example_data = [
            {"Institution": "A", "TLR": 70, "GO": 85, "OI": 90, "PR": 75},
            {"Institution": "B", "TLR": 80, "GO": 88, "OI": 85, "PR": 90},
            {"Institution": "C", "TLR": 65, "GO": 80, "OI": 70, "PR": 60},
        ]
        ranked_data = calculate_ranking(example_data)
        response = "Here are the ranks of the institutions:\n"
        for institution in ranked_data:
            response += f"Rank {institution['Rank']}: {institution['Institution']} (Total Score: {institution['Total']})\n"
        return response

    # Generate chatbot response from model
    outputs = pipeline(
        user_message,
        max_new_tokens=256,
        do_sample=True,
        temperature=0.6,
        top_p=0.9,
    )
    # NOTE(review): text-generation pipelines typically echo the prompt inside
    # "generated_text"; strip it if only the continuation should be shown.
    return outputs[0]["generated_text"]
# Gradio interface
def build_gradio_ui():
    """Build and return the Gradio Blocks UI wired to `chatbot_response`."""
    with gr.Blocks() as demo:
        gr.Markdown("## Chatbot with Hugging Face Spaces")
        gr.Markdown("Type a message to interact with the chatbot! (Ask about institution rankings too!)")
        with gr.Row():
            user_input = gr.Textbox(label="Your Message", placeholder="Type your message here...")
            chatbot_output = gr.Textbox(label="Chatbot Response", interactive=False)
        submit_button = gr.Button("Send")
        # Clicking "Send" routes the message through the chatbot function.
        submit_button.click(chatbot_response, inputs=[user_input], outputs=[chatbot_output])
    return demo
# Build the UI at import time so Spaces can serve it.
demo = build_gradio_ui()

if __name__ == "__main__":
    # share=True prints a public gradio.live link when run locally;
    # on Spaces the app is hosted regardless.
    demo.launch(share=True)