# CentaurSock / app.py
# import gradio as gr
# import openai
# import os

# # Setup and initialization
# openai.api_key = os.getenv("OPENAI_API_KEY")

# from openai import OpenAI
# client = OpenAI(openai.api_key=os.getenv("OPENAI_API_KEY"))

# def openai_chat(prompt, chat_history):
#     """Generic function to handle chatting with OpenAI's GPT model."""
#     try:
#         response = client.engines.gpt_3_5_turbo.completions.create(
#             prompt=prompt,
#             max_tokens=150
#         )
#         bot_message = response.choices[0].text.strip()
#         chat_history.append({"role": "assistant", "content": bot_message})
#         return '', chat_history
#     except Exception as e:
#         return f"An error occurred: {str(e)}", chat_history

# iface = gr.Interface(
#     fn=chatbot_response,
#     inputs="text",
#     outputs="text",
#     title="Chatbot",
#     description="Ask a question and get an answer from the chatbot."
# )
# iface.launch(share=True)

import gradio as gr
import openai
import os

# Setup and initialization (the ChatCompletion API below requires openai<1.0)
openai.api_key = os.getenv("OPENAI_API_KEY")


def openai_chat(prompt, chat_history):
    """Generic function to handle chatting with OpenAI's GPT model."""
    try:
        response = openai.ChatCompletion.create(
            model="gpt-3.5-turbo",
            messages=[
                {"role": "system", "content": "You are a helpful assistant."},
                {"role": "user", "content": prompt}
            ],
            max_tokens=150
        )
        bot_message = response.choices[0].message["content"]
        chat_history.append({"role": "assistant", "content": bot_message})
        # Return the assistant's reply so the interface has something to display
        return bot_message, chat_history
    except Exception as e:
        return f"An error occurred: {str(e)}", chat_history


def chatbot_response(prompt, chat_history=None):
    """Handles the chat functionality for the single-textbox interface."""
    chat_history = chat_history or []
    response, chat_history = openai_chat(prompt, chat_history)
    return response


# Gradio Interface Layout
iface = gr.Interface(
    fn=chatbot_response,
    inputs=gr.Textbox(lines=7, label="Chat with AI"),
    outputs=gr.Textbox(label="Reply"),
    title="AI Chatbot",
    description="Ask anything you want"
)

iface.launch(share=True)
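
# --- Alternative sketch (not part of the running app) ---
# A minimal sketch of the same helper written against the openai>=1.0 client
# interface, which the commented drafts below were aiming for. The function
# name openai_chat_v1 is illustrative; the model and max_tokens values are
# carried over from the code above.
#
# from openai import OpenAI
# client = OpenAI(api_key=os.getenv("OPENAI_API_KEY"))
#
# def openai_chat_v1(prompt, chat_history):
#     """Same behaviour as openai_chat, but via the openai>=1.0 client."""
#     try:
#         response = client.chat.completions.create(
#             model="gpt-3.5-turbo",
#             messages=[
#                 {"role": "system", "content": "You are a helpful assistant."},
#                 {"role": "user", "content": prompt}
#             ],
#             max_tokens=150
#         )
#         bot_message = response.choices[0].message.content
#         chat_history.append({"role": "assistant", "content": bot_message})
#         return bot_message, chat_history
#     except Exception as e:
#         return f"An error occurred: {str(e)}", chat_history
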
# import gradio as gr
# import openai
# import os
# import json
# from datetime import datetime

# # Setup and initialization
# openai.api_key = os.getenv("OPENAI_API_KEY")

# # Shared Session Log
# session_log = {
#     "session_id": "S1",
#     "interactions": [],
#     "outcome": {"gatekeeper_decision": "pending", "persuasion_strategy": "ongoing", "ai_influence_metric": 0}
# }
# # Function Definitions
# def gatekeeper_chat(message, chat_history):
#     """Handles the Gatekeeper chat functionality."""
#     prompt = "As a gatekeeper, enforce the rules: " + "\n".join([m['content'] for m in chat_history]) + "\n" + message
#     response, chat_history = openai_chat(prompt, chat_history)
#     update_session_log("HP1", message, response)
#     return response, chat_history

# def persuader_chat(message, chat_history):
#     """Handles the Persuader chat functionality."""
#     # The message could be a direct message or a request for analysis/suggestions
#     if message.startswith("#analyze"):
#         response = analyze_interaction()
#     else:
#         response = "As a persuader, I suggest: " + message
#     return response, chat_history
# # from openai import OpenAI
# # client = OpenAI(api_key=os.getenv("OPENAI_API_KEY"))
# # def openai_chat(prompt, chat_history):
# #     """Generic function to handle chatting with OpenAI's GPT model."""
# #     try:
# #         response = client.chat.completions.create(
# #             model="gpt-3.5-turbo",
# #             messages=[
# #                 {"role": "assistant", "content": prompt}
# #             ],
# #             max_tokens=150
# #         )
# #         bot_message = response.choices[0].message.content.strip()
# #         chat_history.append({"role": "assistant", "content": bot_message})
# #         return '', chat_history
# #     except Exception as e:
# #         return f"An error occurred: {str(e)}", chat_history
# from openai import OpenAI
# client = OpenAI(api_key=os.getenv("OPENAI_API_KEY"))

# def openai_chat(prompt, chat_history):
#     """Generic function to handle chatting with OpenAI's GPT model."""
#     try:
#         response = client.engines.gpt_3_5_turbo.completions.create(
#             prompt=prompt,
#             max_tokens=150
#         )
#         bot_message = response.choices[0].text.strip()
#         chat_history.append({"role": "assistant", "content": bot_message})
#         return '', chat_history
#     except Exception as e:
#         return f"An error occurred: {str(e)}", chat_history
# def openai_chat(prompt, chat_history):
#     """Generic function to handle chatting with OpenAI's GPT model."""
#     try:
#         # Updated API call: Using openai.ChatCompletion.create instead of openai.Completion.create
#         # The 'messages' parameter now requires a list of message objects, each with a 'role' and 'content'.
#         response = client.chat.completions.create(
#             model="text-davinci-003",
#             messages=[
#                 {"role": "assistant", "content": prompt}
#             ],
#             max_tokens=150
#         )
#         # The response structure has changed: Accessing message content via response.choices[0].message['content']
#         bot_message = response.choices[0].message['content']
#         chat_history.append({"role": "assistant", "content": bot_message})
#         return '', chat_history
#     except Exception as e:
#         # Error handling remains the same
#         return f"An error occurred: {str(e)}", chat_history
# def update_session_log(actor, message, response):
#     """Updates the session log with the latest interaction."""
#     session_log["interactions"].append({
#         "timestamp": datetime.now().isoformat(),
#         "actor": actor,
#         "message": message,
#         "gatekeeper_response": response
#     })

# def analyze_interaction():
#     """Provides analysis or suggestions based on the session log."""
#     # Implement analysis logic here based on session_log
#     latest_interaction = session_log["interactions"][-1] if session_log["interactions"] else None
#     if latest_interaction:
#         # Example analysis logic
#         return f"Latest gatekeeper response: {latest_interaction['gatekeeper_response']}"
#     return "No interactions to analyze."
# # Gradio Interface Layout
# with gr.Blocks() as app:
#     with gr.Row():
#         gr.Markdown("### Gatekeeper Chat")
#         gatekeeper_input, gatekeeper_button, gatekeeper_output = gr.Textbox(label="Your Message"), gr.Button("Send"), gr.Chatbot(label="Gatekeeper Chat History")
#         gr.Markdown("### Persuader Chat")
#         persuader_input, persuader_button, persuader_output = gr.Textbox(label="Your Message"), gr.Button("Send"), gr.Chatbot(label="Persuader Chat History")
#     gatekeeper_button.click(fn=gatekeeper_chat, inputs=[gatekeeper_input, gatekeeper_output], outputs=[gatekeeper_input, gatekeeper_output])
#     persuader_button.click(fn=persuader_chat, inputs=[persuader_input, persuader_output], outputs=[persuader_input, persuader_output])

# # Launch the app
# app.launch()
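
# --- Alternative sketch (not part of the running app) ---
# A minimal sketch of how a click handler for the Blocks draft above could be
# wired, assuming gr.Chatbot's default history format of (user_message,
# bot_reply) pairs rather than the role/content dicts appended elsewhere in
# this file. The names gatekeeper_turn, demo, chat, box and send are
# illustrative, not taken from the draft.
#
# def gatekeeper_turn(message, pairs):
#     """Appends one (user, bot) pair to the Chatbot history and clears the textbox."""
#     pairs = pairs or []
#     reply, _ = openai_chat("As a gatekeeper, enforce the rules: " + message, [])
#     pairs.append((message, reply))
#     return "", pairs
#
# with gr.Blocks() as demo:
#     chat = gr.Chatbot(label="Gatekeeper Chat History")
#     box = gr.Textbox(label="Your Message")
#     send = gr.Button("Send")
#     send.click(fn=gatekeeper_turn, inputs=[box, chat], outputs=[box, chat])
# demo.launch()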