import gradio as gr
from sentence_transformers import SentenceTransformer, util
import openai
import os

os.environ["TOKENIZERS_PARALLELISM"] = "false"

# Initialize paths and model identifiers for easy configuration and maintenance
filename = "output_country_details.txt"  # Path to the file storing country-specific details
retrieval_model_name = 'output/sentence-transformer-finetuned/'
openai.api_key = os.environ["OPENAI_API_KEY"]

# Attempt to load the retrieval model and provide feedback on success or failure
try:
    retrieval_model = SentenceTransformer(retrieval_model_name)
    print("Retrieval model loaded successfully.")
except Exception as e:
    print(f"Failed to load retrieval model: {e}")


def load_and_preprocess_text(filename):
    """
    Load and preprocess text from a file, removing empty lines and stripping whitespace.
    Each non-empty line becomes one retrievable segment.
    """
    try:
        with open(filename, 'r', encoding='utf-8') as file:
            segments = [line.strip() for line in file if line.strip()]
        print("Text loaded and preprocessed successfully.")
        return segments
    except Exception as e:
        print(f"Failed to load or preprocess text: {e}")
        return []


segments = load_and_preprocess_text(filename)


def find_relevant_segment(user_query, segments):
    """
    Find the most relevant text segment for a user's query using cosine similarity
    between sentence embeddings. Segments that mention one of a fixed list of
    country names are preferred; if none match, all segments are considered.
    """
    try:
        # Lowercase the query for more consistent embedding of user input
        lower_query = user_query.lower()
        # Keep only segments that mention one of the listed country names
        country_segments = [
            seg for seg in segments
            if any(country.lower() in seg.lower()
                   for country in ['Guatemala', 'Mexico', 'U.S.', 'United States'])
        ]
        # If no country-specific segments are found, fall back to all segments
        if not country_segments:
            country_segments = segments
        query_embedding = retrieval_model.encode(lower_query)
        segment_embeddings = retrieval_model.encode(country_segments)
        similarities = util.pytorch_cos_sim(query_embedding, segment_embeddings)[0]
        best_idx = similarities.argmax()
        return country_segments[best_idx]
    except Exception as e:
        print(f"Error in finding relevant segment: {e}")
        return ""


def generate_response(user_query, relevant_segment):
    """
    Generate a response to the user's question, grounded in the retrieved segment.
    """
    try:
        system_message = "You are a chess chatbot specialized in providing information on chess rules, strategies, and terminology."
        user_message = f"User question: {user_query}\n\nRelevant information: {relevant_segment}"
        messages = [
            {"role": "system", "content": system_message},
            {"role": "user", "content": user_message}
        ]
        # Uses the legacy (pre-1.0) openai.ChatCompletion API
        response = openai.ChatCompletion.create(
            model="gpt-3.5-turbo",
            messages=messages,
            max_tokens=150,
            temperature=0.2,
            top_p=1,
            frequency_penalty=0,
            presence_penalty=0
        )
        return response['choices'][0]['message']['content'].strip()
    except Exception as e:
        print(f"Error in generating response: {e}")
        return f"Error in generating response: {e}"
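# A minimal sketch (not part of the original app) of how the retrieval and
# generation steps compose for a single query; the example question below is an
# assumption. It is kept as a comment so that running this module still only
# starts the Gradio app.
#
#   example_question = "How does the knight move?"
#   example_segment = find_relevant_segment(example_question, segments)
#   print(generate_response(example_question, example_segment))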
# Define the welcome message and the topics the chatbot can provide information about.
welcome_message = """
# Welcome to ChessBot!
## Your AI-driven assistant for all chess-related queries.
"""

topics = """
### Feel free to ask me anything from the topics below!
- Chess piece movements
- Special moves
- Game phases
- Common strategies
- Chess terminology
- Famous games
- Chess tactics
"""


# Define and configure the Gradio application interface to interact with users.
def query_model(question):
    """
    Process a question, find relevant information, and generate a response.

    Args:
        question (str): User input question.

    Returns:
        str: Generated response, or the welcome message if no question is provided.
    """
    if question == "":
        return welcome_message
    relevant_segment = find_relevant_segment(question, segments)
    if not relevant_segment:
        return "Could not find specific information. Please refine your question."
    response = generate_response(question, relevant_segment)
    return response


# Set up the Gradio Blocks interface with custom layout components
with gr.Blocks() as demo:
    gr.Markdown(welcome_message)  # Display the formatted welcome message
    with gr.Row():
        with gr.Column():
            gr.Markdown(topics)  # Show the topics on the left side
        with gr.Row():
            img = gr.Image(os.path.join(os.getcwd(), "final.png"), width=500)  # Include an image for visual appeal
    with gr.Row():
        with gr.Column():
            question = gr.Textbox(label="Your question", placeholder="What do you want to ask about?")
            answer = gr.Textbox(label="ChessBot Response", placeholder="ChessBot will respond here...", interactive=False, lines=10)
            submit_button = gr.Button("Submit")
            submit_button.click(fn=query_model, inputs=question, outputs=answer)

# Launch the Gradio app to allow user interaction
demo.launch(share=True)
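# Rough run instructions (a sketch; exact package pins are assumptions): install
# gradio, sentence-transformers, and openai<1 (the script uses the legacy
# openai.ChatCompletion API), export OPENAI_API_KEY, and keep
# output_country_details.txt, final.png, and the fine-tuned model under
# output/sentence-transformer-finetuned/ alongside this script. share=True also
# publishes a temporary public Gradio URL in addition to the local one.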