import type { Conversation } from "$lib/types/Conversation";
import type { TextGenerationStreamOutput } from "@huggingface/inference";
import { endpointTgi, endpointTgiParametersSchema } from "./tgi/endpointTgi";
import { z } from "zod";
import endpointAws, { endpointAwsParametersSchema } from "./aws/endpointAws";
import { endpointOAIParametersSchema, endpointOai } from "./openai/endpointOai";
import endpointLlamacpp, { endpointLlamacppParametersSchema } from "./llamacpp/endpointLlamacpp";
import endpointOllama, { endpointOllamaParametersSchema } from "./ollama/endpointOllama";

// parameters passed when generating text
interface EndpointParameters {
	conversation: {
		messages: Omit<Conversation["messages"][0], "id">[];
		preprompt?: Conversation["preprompt"];
		_id?: Conversation["_id"];
	};
}

// fields common to every endpoint configuration
interface CommonEndpoint {
	weight: number;
}

// type signature for the endpoint
export type Endpoint = (
	params: EndpointParameters
) => Promise<AsyncGenerator<TextGenerationStreamOutput, void, void>>;

// generator function that takes in parameters defining the endpoint and returns the endpoint
export type EndpointGenerator<T extends CommonEndpoint> = (parameters: T) => Endpoint;
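
// Hypothetical illustration, not part of the upstream module: a minimal generator that
// satisfies EndpointGenerator. It ignores its configuration and echoes the last message of
// the conversation back as a single streamed token; the shape of the yielded object is
// assumed from @huggingface/inference's TextGenerationStreamOutput.
const endpointEcho: EndpointGenerator<CommonEndpoint> = () => async ({ conversation }) =>
	(async function* () {
		const text = conversation.messages[conversation.messages.length - 1]?.content ?? "";
		yield {
			token: { id: 0, text, logprob: 0, special: false },
			generated_text: text,
			details: null,
		};
	})();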

// list of all endpoint generators
export const endpoints = {
	tgi: endpointTgi,
	aws: endpointAws,
	openai: endpointOai,
	llamacpp: endpointLlamacpp,
	ollama: endpointOllama,
};

// discriminated union of every endpoint's parameter schema, keyed on its "type" field
export const endpointSchema = z.discriminatedUnion("type", [
	endpointAwsParametersSchema,
	endpointOAIParametersSchema,
	endpointTgiParametersSchema,
	endpointLlamacppParametersSchema,
	endpointOllamaParametersSchema,
]);

export default endpoints;
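
// Hypothetical usage sketch, not part of the upstream module: resolve an endpoint generator
// from a raw config object and stream tokens from it. `rawConfig` and `streamExample` are
// invented names; the sketch assumes each schema's "type" literal matches its key in
// `endpoints`, which is what the discriminated union above relies on.
async function streamExample(
	rawConfig: unknown,
	messages: EndpointParameters["conversation"]["messages"]
) {
	// validate the config; the "type" field selects the backend generator
	const spec = endpointSchema.parse(rawConfig);
	// build the endpoint (the cast sidesteps TypeScript's inability to correlate union members;
	// the await is harmless if the generator is synchronous)
	const endpoint = await endpoints[spec.type](spec as never);
	// run the endpoint on a conversation and consume the resulting token stream
	for await (const output of await endpoint({ conversation: { messages } })) {
		console.log(output.token.text);
	}
}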