Support websearch for OpenAI chat completion mode (#608)
src/lib/server/endpoints/openai/endpointOai.ts
CHANGED
@@ -4,6 +4,7 @@ import { openAIChatToTextGenerationStream } from "./openAIChatToTextGenerationStream";
 import { buildPrompt } from "$lib/buildPrompt";
 import { OPENAI_API_KEY } from "$env/static/private";
 import type { Endpoint } from "../endpoints";
+import { format } from "date-fns";
 
 export const endpointOAIParametersSchema = z.object({
 	weight: z.number().int().positive().default(1),
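The new date-fns import is only used to stamp the web-search prompt with a human-readable date. A minimal sketch of what that formatting call produces (the fixed date below is illustrative, not taken from the PR):

import { format } from "date-fns";

// "MMMM d, yyyy" renders the full month name, unpadded day of month, and
// four-digit year, e.g. "December 1, 2023" for the date constructed below.
const currentDate = format(new Date(2023, 11, 1), "MMMM d, yyyy");
console.log(currentDate); // "December 1, 2023"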
@@ -54,7 +55,37 @@ export async function endpointOai(
 		};
 	} else if (completion === "chat_completions") {
 		return async ({ conversation }) => {
-			const messagesOpenAI = conversation.messages.map((message) => ({
+			let messages = conversation.messages;
+			const webSearch = conversation.messages[conversation.messages.length - 1].webSearch;
+
+			if (webSearch && webSearch.context) {
+				const lastMsg = messages.slice(-1)[0];
+				const messagesWithoutLastUsrMsg = messages.slice(0, -1);
+				const previousUserMessages = messages.filter((el) => el.from === "user").slice(0, -1);
+
+				const previousQuestions =
+					previousUserMessages.length > 0
+						? `Previous questions: \n${previousUserMessages
+								.map(({ content }) => `- ${content}`)
+								.join("\n")}`
+						: "";
+				const currentDate = format(new Date(), "MMMM d, yyyy");
+				messages = [
+					...messagesWithoutLastUsrMsg,
+					{
+						from: "user",
+						content: `I searched the web using the query: ${webSearch.searchQuery}. Today is ${currentDate} and here are the results:
+					=====================
+					${webSearch.context}
+					=====================
+					${previousQuestions}
+					Answer the question: ${lastMsg.content}
+					`,
+					},
+				];
+			}
+
+			const messagesOpenAI = messages.map((message) => ({
 				role: message.from,
 				content: message.content,
 			}));
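The rewrite above reads exactly two fields off the last message's web-search result: searchQuery and context. A minimal sketch of that shape and of the user turn it produces (the placeholder values are illustrative, and chat-ui's stored web-search object carries more fields than the two shown here):

// Sketch of the fields this diff relies on; other properties of the stored
// web-search result are omitted because the diff never touches them.
type WebSearchLike = {
	searchQuery: string; // query that was sent to the search engine
	context: string; // scraped page content used to ground the answer
};

// When webSearch.context is present, the final user message is replaced by a
// single prompt embedding the query, today's date, the search context, any
// earlier user questions, and the original question, roughly:
//
//   {
//     from: "user",
//     content: `I searched the web using the query: <searchQuery>. Today is <currentDate> and here are the results:
//     =====================
//     <context>
//     =====================
//     Previous questions:
//     - <earlier user question>
//     Answer the question: <last user message>
//     `,
//   }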
@@ -63,8 +94,8 @@ export async function endpointOai(
 				await openai.chat.completions.create({
 					model: model.id ?? model.name,
 					messages: conversation.preprompt
-						? [{ role: "system", content: conversation.preprompt }, ...messagesOpenAI]
-						: messagesOpenAI,
+						? [{ role: "system", content: conversation.preprompt }, ...messagesOpenAI]
+						: messagesOpenAI,
 					stream: true,
 					max_tokens: model.parameters?.max_new_tokens,
 					stop: model.parameters?.stop,
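Downstream the request itself is unchanged: the (possibly rewritten) messages are mapped to OpenAI roles, and a system message is prepended when a preprompt is configured. A rough sketch of the resulting call, with illustrative values rather than anything captured from chat-ui:

// Sketch only: the model name, preprompt, and message text are made up.
await openai.chat.completions.create({
	model: "gpt-3.5-turbo",
	messages: [
		{ role: "system", content: "You are a helpful assistant." }, // conversation.preprompt
		{
			role: "user",
			content: "I searched the web using the query: ... and here are the results: ...",
		},
	],
	stream: true,
	max_tokens: 1024,
});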