import { HF_ACCESS_TOKEN } from "$env/static/private";
import { buildPrompt } from "$lib/buildPrompt";
import { textGenerationStream } from "@huggingface/inference";
import type { Endpoint } from "../endpoints";
import { z } from "zod";

// Configuration schema for a text-generation-inference (TGI) endpoint.
// The access token defaults to HF_ACCESS_TOKEN when none is provided.
export const endpointTgiParametersSchema = z.object({
  weight: z.number().int().positive().default(1),
  model: z.any(),
  type: z.literal("tgi"),
  url: z.string().url(),
  accessToken: z.string().min(1).default(HF_ACCESS_TOKEN),
});

// Builds an Endpoint backed by a TGI server reachable at `url`.
export function endpointTgi({
  url,
  accessToken,
  model,
}: z.infer<typeof endpointTgiParametersSchema>): Endpoint {
  return async ({ conversation }) => {
    // Assemble the prompt from the conversation history, the web search
    // attached to the latest message, and the model's preprompt.
    const prompt = await buildPrompt({
      messages: conversation.messages,
      webSearch: conversation.messages[conversation.messages.length - 1].webSearch,
      preprompt: conversation.preprompt,
      model,
    });

    // Stream tokens from the endpoint; return_full_text is disabled so only
    // newly generated text is sent back.
    return textGenerationStream({
      parameters: { ...model.parameters, return_full_text: false },
      model: url,
      inputs: prompt,
      accessToken,
    });
  };
}

export default endpointTgi;
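
// Usage sketch: wiring this endpoint by hand. The config values below are
// placeholders, and the conversation shape is assumed from the fields this
// module reads (messages, preprompt, and webSearch on the last message).
//
// const endpoint = endpointTgi(
//   endpointTgiParametersSchema.parse({
//     type: "tgi",
//     url: "http://127.0.0.1:8080", // placeholder TGI server URL
//     model,
//   })
// );
//
// const stream = await endpoint({ conversation });
// for await (const output of stream) {
//   process.stdout.write(output.token.text);
// }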