barathm111 committed on
Commit 6225e5c
1 Parent(s): d3a54c8

Upload app.py

Files changed (1)
  1. app.py +22 -25
app.py CHANGED
@@ -20,40 +20,37 @@ pipeline = transformers.pipeline(
     use_auth_token=hf_token # Use the Hugging Face token here
 )
 
-# Function to calculate scores and rankings
-def calculate_ranking(data):
-    for institution in data:
-        institution["Total"] = (
-            institution["TLR"] + institution["GO"] + institution["OI"] + institution["PR"]
-        )
-    ranked_data = sorted(data, key=lambda x: x["Total"], reverse=True)
-    for rank, institution in enumerate(ranked_data, start=1):
-        institution["Rank"] = rank
-    return ranked_data
-
-# Predefined ranking data
+# Predefined data
 example_data = [
     {"Institution": "A", "TLR": 70, "GO": 85, "OI": 90, "PR": 75},
     {"Institution": "B", "TLR": 80, "GO": 88, "OI": 85, "PR": 90},
     {"Institution": "C", "TLR": 65, "GO": 80, "OI": 70, "PR": 60},
 ]
 
+# Format predefined data into a readable string
+predefined_context = "Here are the institution rankings based on scores:\n"
+for institution in sorted(example_data, key=lambda x: x["TLR"] + x["GO"] + x["OI"] + x["PR"], reverse=True):
+    total_score = institution["TLR"] + institution["GO"] + institution["OI"] + institution["PR"]
+    predefined_context += f"- {institution['Institution']} (Total Score: {total_score})\n"
+
+# System prompt to provide context to the model
+system_prompt = f"""You are an intelligent assistant. Here is some contextual information:
+{predefined_context}
+
+When a user asks about rankings, respond with this information. If the user asks general questions, respond appropriately.
+"""
+
 # Chatbot function
 def chatbot_response(user_message):
-    # Check for predefined data queries
-    if "rank" in user_message.lower():
-        ranked_data = calculate_ranking(example_data)
-        response = "Here are the ranks of the institutions:\n"
-        for institution in ranked_data:
-            response += f"Rank {institution['Rank']}: {institution['Institution']} (Total Score: {institution['Total']})\n"
-        return response
+    # Combine system prompt with the user's message
+    full_prompt = f"{system_prompt}\nUser: {user_message}\nAssistant:"
 
-    # Fallback to model-generated response for out-of-scope questions
+    # Generate a response using the model
     outputs = pipeline(
-        user_message,
-        max_new_tokens=100, # Restrict length for unexpected questions
+        full_prompt,
+        max_new_tokens=150, # Adjust token limit as needed
         do_sample=True,
-        temperature=0.7, # Slightly random responses for more natural output
+        temperature=0.7,
         top_p=0.9,
     )
     return outputs[0]["generated_text"]
@@ -61,8 +58,8 @@ def chatbot_response(user_message):
 # Gradio interface
 def build_gradio_ui():
     with gr.Blocks() as demo:
-        gr.Markdown("## Chatbot with Predefined Data and AI Responses")
-        gr.Markdown("Ask about institution rankings or any other general query!")
+        gr.Markdown("## Intelligent Chatbot with Predefined Context and AI Responses")
+        gr.Markdown("Ask about institution rankings or any general query!")
         with gr.Row():
             user_input = gr.Textbox(label="Your Message", placeholder="Type your message here...")
             chatbot_output = gr.Textbox(label="Chatbot Response", interactive=False)
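
The hunks start at line 20 and cut off inside build_gradio_ui(), so the imports, the pipeline construction, and the UI wiring below the two Textboxes are not visible in this commit. The following is a minimal sketch of that surrounding scaffolding, assuming an HF_TOKEN environment variable, a placeholder model id, and a Send button wired to chatbot_response; none of these specifics are confirmed by the diff.

# Sketch of the scaffolding around the diff (not part of this commit).
# The model id, the HF_TOKEN environment variable, the abbreviated
# system_prompt, and the Send-button wiring are assumptions.
import os

import gradio as gr
import transformers

hf_token = os.environ.get("HF_TOKEN")  # assumed token source
model_id = "gpt2"  # placeholder only; the app's real model id is not shown in the diff

pipeline = transformers.pipeline(
    "text-generation",
    model=model_id,
    use_auth_token=hf_token,  # Use the Hugging Face token here, as in the diff
)

# Stand-in prompt; the commit builds the real one from example_data (see diff above)
system_prompt = "You are an intelligent assistant."

def chatbot_response(user_message):
    # Same prompt-in-context approach as the new chatbot_response in the diff
    full_prompt = f"{system_prompt}\nUser: {user_message}\nAssistant:"
    outputs = pipeline(
        full_prompt,
        max_new_tokens=150,
        do_sample=True,
        temperature=0.7,
        top_p=0.9,
    )
    return outputs[0]["generated_text"]

def build_gradio_ui():
    with gr.Blocks() as demo:
        gr.Markdown("## Intelligent Chatbot with Predefined Context and AI Responses")
        gr.Markdown("Ask about institution rankings or any general query!")
        with gr.Row():
            user_input = gr.Textbox(label="Your Message", placeholder="Type your message here...")
            chatbot_output = gr.Textbox(label="Chatbot Response", interactive=False)
        # Assumed wiring; the diff is cut off right after the two Textboxes
        send_button = gr.Button("Send")
        send_button.click(fn=chatbot_response, inputs=user_input, outputs=chatbot_output)
    return demo

if __name__ == "__main__":
    build_gradio_ui().launch()

In short, the commit swaps the hand-coded ranking branch for prompt context: the precomputed ranking text is folded into a system prompt, so the model answers ranking questions from that context and handles everything else as a general query.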