import gradio as gr
from transformers import AutoModelForSequenceClassification, AutoTokenizer
import torch
# Load the model and tokenizer from Hugging Face
model_name = "poudel/Depression_and_Non-Depression_Classifier"  # Hugging Face model repo ID
model = AutoModelForSequenceClassification.from_pretrained(model_name)
tokenizer = AutoTokenizer.from_pretrained(model_name)
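# Optional (assumption, not in the original script): explicitly switch the model to
# eval mode so dropout and other training-time behavior are disabled at inference.
# from_pretrained generally returns the model in eval mode already, so this is defensive.
model.eval()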
# Define the prediction function
def predict(text):
    # Tokenize the input text
    inputs = tokenizer(text, return_tensors="pt", padding=True, truncation=True, max_length=128)
    # Get model predictions
    with torch.no_grad():
        outputs = model(**inputs)
    # Convert logits to probabilities
    probabilities = torch.softmax(outputs.logits, dim=-1)
    # Get the predicted class (0 or 1)
    predicted_class = torch.argmax(probabilities, dim=1).item()
    # Map the predicted class to its label (0 = Depression, 1 = Non-depression)
    label_mapping = {0: "Depression", 1: "Non-depression"}
    return label_mapping[predicted_class]
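# Optional sketch (assumption, not part of the original app): a variant of predict()
# that also reports the softmax confidence of the chosen class. The name
# predict_with_confidence is illustrative, not an existing API.
def predict_with_confidence(text):
    inputs = tokenizer(text, return_tensors="pt", padding=True, truncation=True, max_length=128)
    with torch.no_grad():
        outputs = model(**inputs)
    probabilities = torch.softmax(outputs.logits, dim=-1)
    predicted_class = torch.argmax(probabilities, dim=1).item()
    confidence = probabilities[0, predicted_class].item()
    label_mapping = {0: "Depression", 1: "Non-depression"}
    return f"{label_mapping[predicted_class]} (confidence: {confidence:.2%})"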
# Create a Gradio interface
interface = gr.Interface(
    fn=predict,  # The function called for each prediction
    inputs=gr.Textbox(lines=2, placeholder="Enter some text here..."),  # Input textbox for the user
    outputs="text",  # Output is the predicted label as text
    title="Depression vs. Non-depression Classification",  # Title of the app
    description="Enter a sentence to classify it as 'Depression' or 'Non-depression'.",  # Short description
)
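# Note (assumption, not in the original app): gr.Interface also accepts an `examples`
# argument listing sample inputs that appear as clickable examples in the UI, e.g.:
#   examples=["I can't find the energy to get out of bed.", "Had a great day with friends."]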
# Launch the Gradio app
interface.launch()
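# Note (assumption, not in the original app): when running locally, launch(share=True)
# creates a temporary public URL; on Hugging Face Spaces the plain launch() above suffices.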