File size: 1,539 Bytes
353c257
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
cabf6b4
353c257
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43

import gradio as gr
from transformers import AutoModelForSequenceClassification, AutoTokenizer
import torch

# Load the fine-tuned depression classifier and its matching tokenizer from the
# Hugging Face Hub. NOTE(review): this downloads weights on first run and
# requires network access; it executes at import time.
model_name = "poudel/Depression_and_Non-Depression_Classifier"  # Hub repo id of the fine-tuned model
model = AutoModelForSequenceClassification.from_pretrained(model_name)
tokenizer = AutoTokenizer.from_pretrained(model_name)


def predict(text):
    """Classify *text*, returning 'Depression' or 'Non-depression'.

    Uses the module-level `model` and `tokenizer` loaded at import time.
    """
    # Encode the raw string into model-ready tensors, padded/truncated to
    # at most 128 tokens.
    encoded = tokenizer(text, return_tensors="pt", padding=True, truncation=True, max_length=128)

    # Inference only — gradients are unnecessary and would waste memory.
    with torch.no_grad():
        logits = model(**encoded).logits

    # Softmax over the class dimension, then take the most likely class index.
    probs = torch.softmax(logits, dim=-1)
    class_idx = torch.argmax(probs, dim=1).item()

    # Index 0 maps to Depression, 1 to Non-depression (per the model's label order).
    labels = {0: "Depression", 1: "Non-depression"}
    return labels[class_idx]

# Assemble the Gradio UI: a small textbox feeding the classifier, with the
# predicted label rendered back as plain text.
text_input = gr.Textbox(lines=2, placeholder="Enter some text here...")

interface = gr.Interface(
    fn=predict,
    inputs=text_input,
    outputs="text",
    title="Sentiment Classification",
    description="Enter a sentence to classify it as 'Depression' or 'Non-depression'.",
)

# Start the local web server and serve the app.
interface.launch()