import gradio as gr
from transformers import AutoModelForSequenceClassification, AutoTokenizer
import torch

# Load the model and tokenizer
model_name = "ahmetyaylalioglu/text-emotion-classifier"  # Replace with your actual model path
model = AutoModelForSequenceClassification.from_pretrained(model_name)
tokenizer = AutoTokenizer.from_pretrained(model_name)

# Function to predict emotion
def predict_emotion(text):
    inputs = tokenizer(text, return_tensors="pt", truncation=True, max_length=512)
    with torch.no_grad():
        outputs = model(**inputs)
    probabilities = torch.nn.functional.softmax(outputs.logits, dim=-1)
    prediction = torch.argmax(probabilities, dim=-1).item()
    emotion = model.config.id2label[prediction]
    confidence = probabilities[0][prediction].item()
    return f"Emotion: {emotion}\nConfidence: {confidence:.2f}"

# Create Gradio interface
iface = gr.Interface(
    fn=predict_emotion,
    inputs=gr.Textbox(lines=2, placeholder="Enter text here..."),
    outputs="text",
    title="Emotion Classifier",
    description="Enter some text and click 'Submit' to predict the emotion."
)

# Launch the app
iface.launch()