# CentaurSock / app.py
import gradio as gr
import openai
import os
import json
from datetime import datetime

# Setup and initialization
openai.api_key = os.getenv("OPENAI_API_KEY")
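# The API key is read from the environment; set it before launching, e.g. (illustrative):
#   export OPENAI_API_KEY="sk-..."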

# Shared Session Log
session_log = {
    "session_id": "S1",
    "interactions": [],
    "outcome": {"gatekeeper_decision": "pending", "persuasion_strategy": "ongoing", "ai_influence_metric": 0},
}
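# Each entry appended to "interactions" (see update_session_log below) looks like the
# following, with illustrative values:
#   {"timestamp": "2024-01-01T12:00:00", "actor": "HP1",
#    "message": "Please let me through.", "gatekeeper_response": "State your business."}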

# Function Definitions
def gatekeeper_chat(message, chat_history):
    """Handles the Gatekeeper chat functionality."""
    chat_history = chat_history or []
    # Build a prompt from the prior conversation plus the new user message.
    prompt = "As a gatekeeper, enforce the rules: " + "\n".join(m["content"] for m in chat_history) + "\n" + message
    chat_history.append({"role": "user", "content": message})
    response, chat_history = openai_chat(prompt, chat_history)
    update_session_log("HP1", message, response)
    # Clear the textbox and return the updated chat history for display.
    return "", chat_history


def persuader_chat(message, chat_history):
    """Handles the Persuader chat functionality."""
    chat_history = chat_history or []
    # The message can be a direct message or a request for analysis/suggestions.
    if message.startswith("#analyze"):
        response = analyze_interaction()
    else:
        response = "As a persuader, I suggest: " + message
    chat_history.append({"role": "user", "content": message})
    chat_history.append({"role": "assistant", "content": response})
    return "", chat_history


def openai_chat(prompt, chat_history):
    """Generic helper that sends a prompt to an OpenAI chat model (pre-1.0 openai SDK interface)."""
    try:
        # ChatCompletion.create requires a chat model and a list of message objects,
        # each with a "role" and "content".
        response = openai.ChatCompletion.create(
            model="gpt-3.5-turbo",
            messages=[{"role": "user", "content": prompt}],
            max_tokens=150,
        )
        # The reply text is at response.choices[0].message["content"].
        bot_message = response.choices[0].message["content"]
        chat_history.append({"role": "assistant", "content": bot_message})
        return bot_message, chat_history
    except Exception as e:
        # Surface the failure in the chat history so the user can see it.
        error_message = f"An error occurred: {str(e)}"
        chat_history.append({"role": "assistant", "content": error_message})
        return error_message, chat_history


def update_session_log(actor, message, response):
    """Updates the session log with the latest interaction."""
    session_log["interactions"].append({
        "timestamp": datetime.now().isoformat(),
        "actor": actor,
        "message": message,
        "gatekeeper_response": response,
    })
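# The json module imported above could be used to persist this log, e.g. (illustrative):
#   with open("session_log.json", "w") as f:
#       json.dump(session_log, f)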


def analyze_interaction():
    """Provides analysis or suggestions based on the session log."""
    # Example analysis logic: surface the most recent gatekeeper response from the log.
    latest_interaction = session_log["interactions"][-1] if session_log["interactions"] else None
    if latest_interaction:
        return f"Latest gatekeeper response: {latest_interaction['gatekeeper_response']}"
    return "No interactions to analyze."


# Gradio Interface Layout
with gr.Blocks() as app:
    with gr.Row():
        with gr.Column():
            gr.Markdown("### Gatekeeper Chat")
            # type="messages" expects openai-style {"role", "content"} dicts and assumes a recent Gradio release.
            gatekeeper_output = gr.Chatbot(label="Gatekeeper Chat History", type="messages")
            gatekeeper_input = gr.Textbox(label="Your Message")
            gatekeeper_button = gr.Button("Send")
        with gr.Column():
            gr.Markdown("### Persuader Chat")
            persuader_output = gr.Chatbot(label="Persuader Chat History", type="messages")
            persuader_input = gr.Textbox(label="Your Message")
            persuader_button = gr.Button("Send")

    gatekeeper_button.click(fn=gatekeeper_chat, inputs=[gatekeeper_input, gatekeeper_output], outputs=[gatekeeper_input, gatekeeper_output])
    persuader_button.click(fn=persuader_chat, inputs=[persuader_input, persuader_output], outputs=[persuader_input, persuader_output])

# Launch the app
app.launch()
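# Usage sketch (assumed workflow): type a message in the Gatekeeper box and press Send to query
# the model; in the Persuader box, a message starting with "#analyze" returns a summary of the
# latest logged gatekeeper response, and anything else is echoed back as a suggestion.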