import os

import gradio as gr
import requests
from openai import OpenAI

# Initialize the OpenAI client
client = OpenAI(api_key=os.environ.get("OPENAI_API_KEY"))
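# NOTE: OPENAI_API_KEY must be set in the environment (for example as a Space
# secret); if it is missing, calls through this client will fail with an
# authentication error.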


# Vector database search function
def search_document(query, k=5):
    url = "http://154.12.226.68:8000/search"
    payload = {
        "text": query,
        "k": k
    }
    headers = {
        "Content-Type": "application/json"
    }
    try:
        response = requests.post(url, json=payload, headers=headers)
        response.raise_for_status()
        return response.json()
    except requests.exceptions.RequestException as e:
        return f"An error occurred: {e}"


# Function to query OpenAI
def query_openai(prompt):
    try:
        response = client.chat.completions.create(
            model="gpt-4o-mini",
            messages=[
                {"role": "system", "content": "You are a helpful assistant. Answer the question based on the provided context."},
                {"role": "user", "content": prompt}
            ]
        )
        return response.choices[0].message.content
    except Exception as e:
        return f"An error occurred while querying OpenAI: {e}"


# Function to perform vector search and format results
def vector_search(query):
    results = search_document(query)
    if isinstance(results, str):  # Error occurred
        return results
    if not isinstance(results, dict) or 'results' not in results:
        return "Unexpected format in vector database response."
    formatted_results = ""
    for i, result in enumerate(results['results'], 1):
        content = result['metadata']['content']
        source = f"Source {i}: {result['metadata'].get('source', 'Unknown source')}, page {result['metadata'].get('page', 'Unknown page')}"
        metadata = ", ".join([f"{k}: {v}" for k, v in result['metadata'].items() if k != 'content'])
        formatted_results += f"{source}\nMetadata: {metadata}\nContent: {content}\n\n"
    return formatted_results
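# Each hit is rendered as a block like the following (values are illustrative):
#   Source 1: example.pdf, page 3
#   Metadata: source: example.pdf, page: 3
#   Content: <chunk text>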


# Combined function for search and query
def search_and_query(question):
    # First, perform the vector search
    search_results = vector_search(question)

    # Then, use these results to query OpenAI
    prompt = f"""Given the following context and question, provide a comprehensive and accurate answer. Use ONLY the information provided in the context to answer. If the context doesn't contain relevant information to answer the question, state that clearly.

Context:
{search_results}

Question: {question}

Answer:"""
    openai_response = query_openai(prompt)

    # Return both the search results and the OpenAI response
    return search_results, openai_response
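# Quick sanity check without the UI (uncomment to try; assumes the search
# endpoint above is reachable and OPENAI_API_KEY is set):
# results, answer = search_and_query("What does the document say about X?")
# print(answer)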


# Gradio interface
with gr.Blocks() as demo:
    question_input = gr.Textbox(label="Enter your question")
    search_output = gr.Textbox(label="Vector Search Results")
    answer_output = gr.Textbox(label="OpenAI Answer")
    query_button = gr.Button("Get Answer")
    query_button.click(search_and_query, inputs=question_input, outputs=[search_output, answer_output])

demo.launch()