import { OPENAI_API_BASE_URL } from '$lib/constants';
import { titleGenerationTemplate } from '$lib/utils';
import { type Model, models, settings } from '$lib/stores';

export const getOpenAIConfig = async (token: string = '') => {
	let error = null;

	const res = await fetch(`${OPENAI_API_BASE_URL}/config`, {
		method: 'GET',
		headers: {
			Accept: 'application/json',
			'Content-Type': 'application/json',
			...(token && { authorization: `Bearer ${token}` })
		}
	})
		.then(async (res) => {
			if (!res.ok) throw await res.json();
			return res.json();
		})
		.catch((err) => {
			console.log(err);
			if ('detail' in err) {
				error = err.detail;
			} else {
				error = 'Server connection failed';
			}
			return null;
		});

	if (error) {
		throw error;
	}

	return res;
};

export const updateOpenAIConfig = async (token: string = '', enable_openai_api: boolean) => {
	let error = null;

	const res = await fetch(`${OPENAI_API_BASE_URL}/config/update`, {
		method: 'POST',
		headers: {
			Accept: 'application/json',
			'Content-Type': 'application/json',
			...(token && { authorization: `Bearer ${token}` })
		},
		body: JSON.stringify({
			enable_openai_api: enable_openai_api
		})
	})
		.then(async (res) => {
			if (!res.ok) throw await res.json();
			return res.json();
		})
		.catch((err) => {
			console.log(err);
			if ('detail' in err) {
				error = err.detail;
			} else {
				error = 'Server connection failed';
			}
			return null;
		});

	if (error) {
		throw error;
	}

	return res;
};

export const getOpenAIUrls = async (token: string = '') => {
	let error = null;

	const res = await fetch(`${OPENAI_API_BASE_URL}/urls`, {
		method: 'GET',
		headers: {
			Accept: 'application/json',
			'Content-Type': 'application/json',
			...(token && { authorization: `Bearer ${token}` })
		}
	})
		.then(async (res) => {
			if (!res.ok) throw await res.json();
			return res.json();
		})
		.catch((err) => {
			console.log(err);
			if ('detail' in err) {
				error = err.detail;
			} else {
				error = 'Server connection failed';
			}
			return null;
		});

	if (error) {
		throw error;
	}

	return res.OPENAI_API_BASE_URLS;
};

export const updateOpenAIUrls = async (token: string = '', urls: string[]) => {
	let error = null;

	const res = await fetch(`${OPENAI_API_BASE_URL}/urls/update`, {
		method: 'POST',
		headers: {
			Accept: 'application/json',
			'Content-Type': 'application/json',
			...(token && { authorization: `Bearer ${token}` })
		},
		body: JSON.stringify({
			urls: urls
		})
	})
		.then(async (res) => {
			if (!res.ok) throw await res.json();
			return res.json();
		})
		.catch((err) => {
			console.log(err);
			if ('detail' in err) {
				error = err.detail;
			} else {
				error = 'Server connection failed';
			}
			return null;
		});

	if (error) {
		throw error;
	}

	return res.OPENAI_API_BASE_URLS;
};

export const getOpenAIKeys = async (token: string = '') => {
	let error = null;

	const res = await fetch(`${OPENAI_API_BASE_URL}/keys`, {
		method: 'GET',
		headers: {
			Accept: 'application/json',
			'Content-Type': 'application/json',
			...(token && { authorization: `Bearer ${token}` })
		}
	})
		.then(async (res) => {
			if (!res.ok) throw await res.json();
			return res.json();
		})
		.catch((err) => {
			console.log(err);
			if ('detail' in err) {
				error = err.detail;
			} else {
				error = 'Server connection failed';
			}
			return null;
		});

	if (error) {
		throw error;
	}

	return res.OPENAI_API_KEYS;
};

export const updateOpenAIKeys = async (token: string = '', keys: string[]) => {
	let error = null;

	const res = await fetch(`${OPENAI_API_BASE_URL}/keys/update`, {
		method: 'POST',
		headers: {
			Accept: 'application/json',
			'Content-Type': 'application/json',
			...(token && { authorization: `Bearer ${token}` })
		},
		body: JSON.stringify({
			keys: keys
		})
	})
		.then(async (res) => {
			if (!res.ok) throw await res.json();
			return res.json();
		})
		.catch((err) => {
			console.log(err);
			if ('detail' in err) {
				error = err.detail;
			} else {
				error = 'Server connection failed';
			}
			return null;
		});

	if (error) {
		throw error;
	}

	return res.OPENAI_API_KEYS;
};
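
// Usage sketch (assumption, not part of this module): an admin settings form would
// typically persist connection changes by calling the update helpers in sequence and
// surfacing the thrown `detail` string to the user. `localStorage.token` and the
// `toast` helper are placeholders for whatever auth/notification mechanism the
// caller already has.
//
//   const saveOpenAIConnections = async (urls: string[], keys: string[]) => {
//       try {
//           await updateOpenAIUrls(localStorage.token, urls);
//           await updateOpenAIKeys(localStorage.token, keys);
//       } catch (error) {
//           // `error` is either the backend's `detail` message or 'Server connection failed'
//           toast.error(`${error}`);
//       }
//   };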
export const getOpenAIModels = async (token: string, urlIdx?: number) => {
	let error = null;

	const res = await fetch(
		`${OPENAI_API_BASE_URL}/models${typeof urlIdx === 'number' ? `/${urlIdx}` : ''}`,
		{
			method: 'GET',
			headers: {
				Accept: 'application/json',
				'Content-Type': 'application/json',
				...(token && { authorization: `Bearer ${token}` })
			}
		}
	)
		.then(async (res) => {
			if (!res.ok) throw await res.json();
			return res.json();
		})
		.catch((err) => {
			error = `OpenAI: ${err?.error?.message ?? 'Network Problem'}`;
			return [];
		});

	if (error) {
		throw error;
	}

	return res;
};

export const getOpenAIModelsDirect = async (
	base_url: string = 'https://api.openai.com/v1',
	api_key: string = ''
) => {
	let error = null;

	const res = await fetch(`${base_url}/models`, {
		method: 'GET',
		headers: {
			'Content-Type': 'application/json',
			Authorization: `Bearer ${api_key}`
		}
	})
		.then(async (res) => {
			if (!res.ok) throw await res.json();
			return res.json();
		})
		.catch((err) => {
			console.log(err);
			error = `OpenAI: ${err?.error?.message ?? 'Network Problem'}`;
			return null;
		});

	if (error) {
		throw error;
	}

	const models = Array.isArray(res) ? res : res?.data ?? null;

	return models
		.map((model) => ({ id: model.id, name: model.name ?? model.id, external: true }))
		.filter((model) => (base_url.includes('openai') ? model.name.includes('gpt') : true))
		.sort((a, b) => {
			return a.name.localeCompare(b.name);
		});
};

export const generateOpenAIChatCompletion = async (
	token: string = '',
	body: object,
	url: string = OPENAI_API_BASE_URL
): Promise<[Response | null, AbortController]> => {
	const controller = new AbortController();
	let error = null;

	const res = await fetch(`${url}/chat/completions`, {
		signal: controller.signal,
		method: 'POST',
		headers: {
			Authorization: `Bearer ${token}`,
			'Content-Type': 'application/json'
		},
		body: JSON.stringify(body)
	}).catch((err) => {
		console.log(err);
		error = err;
		return null;
	});

	if (error) {
		throw error;
	}

	return [res, controller];
};

export const synthesizeOpenAISpeech = async (
	token: string = '',
	speaker: string = 'alloy',
	text: string = '',
	model: string = 'tts-1'
) => {
	let error = null;

	const res = await fetch(`${OPENAI_API_BASE_URL}/audio/speech`, {
		method: 'POST',
		headers: {
			Authorization: `Bearer ${token}`,
			'Content-Type': 'application/json'
		},
		body: JSON.stringify({
			model: model,
			input: text,
			voice: speaker
		})
	}).catch((err) => {
		console.log(err);
		error = err;
		return null;
	});

	if (error) {
		throw error;
	}

	return res;
};

export const generateTitle = async (
	token: string = '',
	template: string,
	model: string,
	prompt: string,
	chat_id?: string,
	url: string = OPENAI_API_BASE_URL
) => {
	let error = null;

	template = titleGenerationTemplate(template, prompt);

	console.log(template);

	const res = await fetch(`${url}/chat/completions`, {
		method: 'POST',
		headers: {
			Accept: 'application/json',
			'Content-Type': 'application/json',
			Authorization: `Bearer ${token}`
		},
		body: JSON.stringify({
			model: model,
			messages: [
				{
					role: 'user',
					content: template
				}
			],
			stream: false,
			// Restricting the max tokens to 50 to avoid long titles
			max_tokens: 50,
			...(chat_id && { chat_id: chat_id }),
			title: true
		})
	})
		.then(async (res) => {
			if (!res.ok) throw await res.json();
			return res.json();
		})
		.catch((err) => {
			console.log(err);
			if ('detail' in err) {
				error = err.detail;
			}
			return null;
		});

	if (error) {
		throw error;
	}

	return res?.choices[0]?.message?.content.replace(/["']/g, '') ?? 'New Chat';
};
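
/**
 * Generates a short web-search query for the given prompt via the chat completions
 * endpoint. The 'assistant'-role instruction message pins the current date and the
 * request caps the response at 30 tokens; surrounding quotes are stripped from the
 * result. Returns `undefined` when the request fails or the model returns no content,
 * so callers would typically fall back to the raw prompt in that case (caller
 * behaviour is an assumption, not something this helper enforces).
 */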
export const generateSearchQuery = async (
	token: string = '',
	model: string,
	previousMessages: string[],
	prompt: string,
	url: string = OPENAI_API_BASE_URL
): Promise<string | undefined> => {
	let error = null;

	// TODO: Allow users to specify the prompt

	// Get the current date in the format "January 20, 2024"
	const currentDate = new Intl.DateTimeFormat('en-US', {
		year: 'numeric',
		month: 'long',
		day: '2-digit'
	}).format(new Date());

	const res = await fetch(`${url}/chat/completions`, {
		method: 'POST',
		headers: {
			Accept: 'application/json',
			'Content-Type': 'application/json',
			Authorization: `Bearer ${token}`
		},
		body: JSON.stringify({
			model: model,
			// Few shot prompting
			messages: [
				{
					role: 'assistant',
					content: `You are tasked with generating web search queries. Give me an appropriate query to answer my question for google search. Answer with only the query. Today is ${currentDate}.`
				},
				{
					role: 'user',
					content: prompt
				}
				// {
				// 	role: 'user',
				// 	content:
				// 		(previousMessages.length > 0
				// 			? `Previous Questions:\n${previousMessages.join('\n')}\n\n`
				// 			: '') + `Current Question: ${prompt}`
				// }
			],
			stream: false,
			// Restricting the max tokens to 30 to avoid long search queries
			max_tokens: 30
		})
	})
		.then(async (res) => {
			if (!res.ok) throw await res.json();
			return res.json();
		})
		.catch((err) => {
			console.log(err);
			if ('detail' in err) {
				error = err.detail;
			}
			return undefined;
		});

	if (error) {
		throw error;
	}

	return res?.choices[0]?.message?.content.replace(/["']/g, '') ?? undefined;
};
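
// Usage sketch for generateOpenAIChatCompletion (assumption: the proxied endpoint
// streams Server-Sent Events when `stream: true` is set, as the upstream OpenAI API
// does). The minimal reader loop below only illustrates the
// [Response | null, AbortController] contract; it is not how the app itself
// consumes the stream, and the model name is purely illustrative.
//
//   const [res, controller] = await generateOpenAIChatCompletion(localStorage.token, {
//       model: 'gpt-3.5-turbo',
//       stream: true,
//       messages: [{ role: 'user', content: 'Hello!' }]
//   });
//
//   if (res && res.ok && res.body) {
//       const reader = res.body.pipeThrough(new TextDecoderStream()).getReader();
//       while (true) {
//           const { value, done } = await reader.read();
//           if (done) break;
//           console.log(value); // raw "data: {...}" SSE lines
//       }
//   }
//
//   // controller.abort() cancels the in-flight request, e.g. when the user clicks "stop".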