# HuggingFace page residue from a raw-view scrape (user "kenken999", commit
# 1afbeb8, "raw / history / blame", 7.11 kB) — commented out so the file
# remains valid Python.
# kenken999's picture
# fda
# 1afbeb8
# raw
# history blame
# 7.11 kB
import asyncio
import base64
import hashlib
import hmac
import os
import shutil
import subprocess
import time
def validate_signature(body: str, signature: str, secret: str) -> bool:
    """Return True if *signature* is the base64-encoded HMAC-SHA256 of *body*.

    The digest is computed with *secret* and compared via
    ``hmac.compare_digest`` so the check runs in constant time.
    """
    mac = hmac.new(secret.encode("utf-8"), msg=body.encode("utf-8"), digestmod=hashlib.sha256)
    computed = base64.b64encode(mac.digest()).decode("utf-8")
    return hmac.compare_digest(computed, signature)
def no_process_file(prompt, foldername):
    """Create routers/<foldername>, write *prompt* into a file named
    ``prompt`` inside it, then run ``make run <foldername>``.

    Returns a human-readable string containing the make run's stdout and
    stderr, or an error string if directory creation, the file write, or
    launching make fails.
    """
    set_environment_variables()
    base_dir = f"/home/user/app/routers/{foldername}"
    try:
        # os.makedirs replaces the original `subprocess.Popen(["mkdir", ...])`:
        # Popen never raises CalledProcessError (it doesn't check exit codes),
        # so that except clause was dead code, and the sleep that waited for
        # the unsupervised mkdir to finish is no longer needed.
        os.makedirs(base_dir, exist_ok=True)
    except OSError as e:
        return f"Processed Content:\n\n\nMake Command Error:\n{e}"
    no_extension_path = f"{base_dir}/prompt"
    try:
        # Single write. The original appended with mode "a" and then
        # immediately reopened the same file with mode "w", so only the
        # final overwrite ever mattered; the file ends up containing
        # exactly *prompt* either way.
        with open(no_extension_path, "w") as prompt_file:
            prompt_file.write(prompt)
    except Exception as e:
        return f"Error writing prompt to file: {str(e)}"
    time.sleep(1)
    try:
        proc = subprocess.Popen(
            ["make", "run", foldername],
            stdin=subprocess.PIPE,
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE,
            text=True,
        )
        # Feed answers to the interactive prompts `make run` asks: no, yes, yes.
        stdout, stderr = proc.communicate(input="n\ny\ny\n")
        return f"Processed Content:\n{stdout}\n\nMake Command Output:\n{stdout}\n\nMake Command Error:\n{stderr}"
    except OSError as e:
        # OSError is what Popen actually raises when the command cannot be
        # started; the original caught CalledProcessError, which Popen
        # never raises.
        return f"Processed Content:\n\n\nMake Command Error:\n{e}"
def set_environment_variables():
    """Point the OpenAI-compatible tooling at Groq's endpoint.

    SECURITY: the original version hard-coded a Groq API key in source.
    A committed credential must be treated as compromised and revoked.
    The key is now read from the ``api_key`` environment variable — the
    same variable ``completion()`` already reads — and exported as
    OPENAI_API_KEY so downstream consumers keep working.
    """
    os.environ["OPENAI_API_BASE"] = "https://api.groq.com/openai/v1"
    # Never hard-code secrets; source them from the environment instead.
    os.environ["OPENAI_API_KEY"] = os.getenv("api_key", "")
    os.environ["MODEL_NAME"] = "llama3-8b-8192"
    os.environ["LOCAL_MODEL"] = "true"
def chat_with_interpreter(
    message, history, a=None, b=None, c=None, d=None
):  # , openai_api_key):
    """Stream a reply from ``interpreter.chat``, yielding the growing text.

    Generator: each yield is the accumulated `full_response` so far.
    `a`-`d` are accepted but unused (presumably extra UI inputs passed
    through by the caller — verify against the caller).
    NOTE(review): relies on module-level names `interpreter`,
    `format_response` and `duckdb`, none of which are imported in this
    file — confirm they are provided elsewhere at runtime.
    """
    # Set the API key for the interpreter
    # interpreter.llm.api_key = openai_api_key
    if message == "reset":
        interpreter.reset()
        return "Interpreter reset", history
    full_response = ""
    # add_conversation(history,20)
    user_entry = {"role": "user", "type": "message", "content": message}
    # messages.append(user_entry)
    # Call interpreter.chat and stream the result chunk by chunk.
    for chunk in interpreter.chat(message, display=False, stream=True):
        # print(chunk)
        # output = '\n'.join(item['content'] for item in result if 'content' in item)
        full_response = format_response(chunk, full_response)
        yield full_response  # chunk.get("content", "")
    # Extract the 'content' field from all elements in the result
    def insert(full_response,message):
        # Persist the exchange into a local DuckDB file and export it as CSV.
        age = 28
        con = duckdb.connect(database="./workspace/sample.duckdb")
        con.execute(
            """
            CREATE SEQUENCE IF NOT EXISTS sample_id_seq START 1;
            CREATE TABLE IF NOT EXISTS samples (
            id INTEGER DEFAULT nextval('sample_id_seq'),
            name VARCHAR,
            age INTEGER,
            PRIMARY KEY(id)
            );
            """
        )
        cur = con.cursor()
        con.execute("INSERT INTO samples (name, age) VALUES (?, ?)", (full_response, age))
        con.execute("INSERT INTO samples (name, age) VALUES (?, ?)", (message, age))
        # Export the data to a CSV file
        con.execute("COPY samples TO 'sample.csv' (FORMAT CSV, HEADER)")
        # Commit the data
        con.commit()
        # Select the data back out
        cur = con.execute("SELECT * FROM samples")
        # Fetch the results
        res = cur.fetchall()
        rows = ""
        # Format the results into a display string
        rows = "\n".join([f"name: {row[0]}, age: {row[1]}" for row in res])
        # Close the connection
        con.close()
        # print(cur.fetchall())
    insert(full_response,message)
    # NOTE(review): `rows` is local to insert() and is never returned, so
    # this yield looks like it raises NameError at runtime — confirm the
    # intended scoping (insert() may have been meant to return `rows`).
    yield full_response + rows  # , history
    # NOTE(review): a `return` with a value inside a generator sets
    # StopIteration.value; most callers iterating this generator never see it.
    return full_response, history
async def completion(message: str, history, c=None, d=None):
    """Stream a Groq chat completion for *message* given chat *history*.

    Async generator: yields each streamed content delta as it arrives,
    then yields the full accumulated text once more at the end (so the
    final yield duplicates everything already streamed — presumably for
    a consumer that only keeps the last yield; verify against the caller).
    `c` and `d` are accepted but unused.
    Raises HTTPException(504) if the stream exceeds the timeout.
    NOTE(review): depends on module-level `async_timeout`,
    `GENERATION_TIMEOUT_SEC` and `HTTPException`, which are not defined or
    imported in this file — confirm they are provided elsewhere.
    """
    from groq import Groq
    client = Groq(api_key=os.getenv("api_key"))
    messages = []
    # Replay at most the last 20 (user, assistant) turns so the request
    # stays within the model's context window.
    recent_messages = history[-20:]
    for conversation in recent_messages:
        user_message = conversation[0]
        user_entry = {"role": "user", "content": user_message}
        messages.append(user_entry)
        assistant_message = conversation[1]
        assistant_entry = {"role": "assistant", "content": assistant_message}
        messages.append(assistant_entry)
    user_entry = {"role": "user", "content": message}
    messages.append(user_entry)
    # System prompt (Japanese): "You are an excellent Japanese-language assistant."
    system_prompt = {"role": "system", "content": "あなたは日本語の優秀なアシスタントです。"}
    messages.insert(0, system_prompt)
    async with async_timeout.timeout(GENERATION_TIMEOUT_SEC):
        try:
            # NOTE(review): this Groq client call and the `for chunk in
            # stream` loop below are synchronous — they block the event
            # loop, and asyncio.TimeoutError may therefore never fire
            # mid-stream; confirm whether an async client was intended.
            stream = client.chat.completions.create(
                model="llama3-8b-8192",
                messages=messages,
                temperature=1,
                max_tokens=1024,
                top_p=1,
                stream=True,
                stop=None,
            )
            all_result = ""
            for chunk in stream:
                # Delta content may be None on role/stop chunks; treat as "".
                current_content = chunk.choices[0].delta.content or ""
                all_result += current_content
                yield current_content
            yield all_result
        except asyncio.TimeoutError:
            raise HTTPException(status_code=504, detail="Stream timed out")
def process_file(fileobj, prompt, foldername):
    """Copy an uploaded file into routers/<foldername>, append *prompt* to
    an extension-less copy, and run ``make run <foldername>``.

    fileobj: an uploaded-file object exposing a ``.name`` filesystem path
    (e.g. a Gradio upload), as the original's ``shutil.copyfile(fileobj.name,
    path)`` call demonstrates.
    Returns a human-readable string with the make run's stdout and stderr,
    or an error string if any step fails.
    """
    set_environment_variables()
    base_dir = f"/home/user/app/routers/{foldername}"
    try:
        # os.makedirs replaces `subprocess.Popen(["mkdir", ...])`: Popen
        # never raises CalledProcessError, so that except clause was dead
        # code, and the sleep waiting on the unsupervised mkdir is gone.
        os.makedirs(base_dir, exist_ok=True)
    except OSError as e:
        return f"Processed Content:\n\n\nMake Command Error:\n{e}"
    # BUG FIX: the original passed the file *object* to os.path.basename /
    # shutil.copyfile in three places, which raises TypeError; use the
    # .name path consistently, as the one correct call already did.
    src_path = fileobj.name
    path = f"{base_dir}/" + os.path.basename(src_path)
    shutil.copyfile(src_path, path)
    base_name = os.path.splitext(os.path.basename(src_path))[0]
    no_extension_path = f"{base_dir}/{base_name}"
    shutil.copyfile(src_path, no_extension_path)
    try:
        # BUG FIX: the original appended the prompt and then reopened the
        # same file with mode "w", discarding both the copied content and
        # the append; a single append keeps file content + prompt.
        with open(no_extension_path, "a") as prompt_file:
            prompt_file.write(prompt)
    except Exception as e:
        return f"Error writing prompt to file: {str(e)}"
    time.sleep(1)
    try:
        proc = subprocess.Popen(
            ["make", "run", foldername],
            stdin=subprocess.PIPE,
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE,
            text=True,
        )
        # Feed answers to the interactive prompts `make run` asks: no, yes, yes.
        stdout, stderr = proc.communicate(input="n\ny\ny\n")
        return f"Processed Content:\n{stdout}\n\nMake Command Output:\n{stdout}\n\nMake Command Error:\n{stderr}"
    except OSError as e:
        # OSError is what Popen actually raises when the command cannot be
        # started; the original caught CalledProcessError (never raised by
        # Popen) and referenced a possibly-unbound `stdout` in its handler.
        return f"Processed Content:\n\n\nMake Command Error:\n{e}"