feat: use OpenAI endpoints

This commit is contained in:
Yann Amsellem
2025-04-29 17:25:48 +02:00
parent b45c84e5ae
commit 77da3391de
8 changed files with 125 additions and 38 deletions

View File

@@ -14,7 +14,7 @@
<ul role="listbox">
{#each models as m (m.name + m.brand)}
{@const isSelected =
m.brand === model.brand && m.name === model.name && m.endpoint === model.endpoint}
m.brand === model.brand && m.name === model.name && m.baseURL === model.baseURL}
<li role="option" aria-selected={isSelected}>
<button
title={[m.brand, m.name].filter(Boolean).join(' • ')}

View File

@@ -11,7 +11,8 @@
import ChangeModelBox from './ChangeModelBox.svelte';
import DatasetsBox from './DatasetsBox.svelte';
import Loader from './Loader.svelte';
import type { ChatInput, ChatOutput, Model } from './types';
import { OpenAIClient } from './OpenAI';
import type { ChatInput, Model } from './types';
interface Props {
messages?: ChatInput['messages'];
@@ -43,6 +44,7 @@
let abortController: AbortController | undefined;
let chatMessages = $derived(messages.filter((m) => m.role === 'user' || m.role === 'assistant'));
let modelSelectbox = $state<ReturnType<typeof Select>>();
const uid = $props.id();
function getContextFromTable(table: Table): string {
const columns = table.columns.map((col) => `- ${col.name} (${col.type})`).join('\n');
@@ -53,6 +55,8 @@
dataset ??= datasets?.at(0);
});
const client = $derived(new OpenAIClient(selectedModel.baseURL));
async function handleSubmit(
event: SubmitEvent & { currentTarget: EventTarget & HTMLFormElement }
) {
@@ -70,26 +74,18 @@
try {
abortController = new AbortController();
const response = await fetch(event.currentTarget.action, {
method: event.currentTarget.method,
headers: { 'Content-type': 'application/json' },
body: JSON.stringify({
const completion = await client.createChatCompletion(
{
model: selectedModel.name,
messages: dataset
? [{ role: 'user', content: getContextFromTable(dataset) }, ...messages]
? [{ role: 'system', content: getContextFromTable(dataset) }, ...messages]
: messages,
stream: false
}),
signal: abortController.signal
});
},
{ signal: abortController.signal }
);
if (!response.ok) {
console.error(await response.text());
return;
}
const output: ChatOutput = await response.json();
messages = messages.concat(output.message);
messages = messages.concat(completion.choices[0].message);
} catch (e) {
if (e === 'Canceled by user') {
const last = messages.at(-1);
@@ -167,12 +163,7 @@
<article>
<h2>You</h2>
{#if chatMessages.length === 0 && dataset}{@render context(dataset)}{/if}
<form
id="user-message"
action={selectedModel.endpoint}
method="POST"
onsubmit={handleSubmit}
>
<form id="{uid}-user-message" method="POST" onsubmit={handleSubmit}>
<textarea
name="message"
tabindex="0"
@@ -231,9 +222,10 @@
<ChangeModelBox
{models}
bind:model={selectedModel}
onSelect={() => {
onSelect={(m) => {
modelSelectbox?.close();
abortController?.abort('Model changed');
onModelChange(m);
}}
/>
</Select>
@@ -247,7 +239,7 @@
<Stop size="11" />
</button>
{:else}
<button form="user-message" type="submit" bind:this={submitter} title="Send ⌘⏎">
<button form="{uid}-user-message" type="submit" bind:this={submitter} title="Send ⌘⏎">
Send ⌘⏎
</button>
{/if}
@@ -320,6 +312,7 @@
border: none;
padding: 0;
width: 100%;
min-height: 15px;
display: block;
overflow: visible;
}

View File

@@ -0,0 +1,94 @@
/** Response envelope for `GET /v1/models`. */
interface ModelsResponse {
	object: 'list';
	data: AIModel[];
}

/** A single model entry as returned by `/v1/models`. */
interface AIModel {
	object: 'model';
	id: string;
	// Unix timestamp (seconds) of model creation.
	created: number;
	owned_by: string;
}

/** Request body for `POST /v1/chat/completions` (non-streaming by default). */
interface ChatCompletionInput {
	model: string;
	messages: ChatMessage[];
	stream?: boolean | undefined;
	frequency_penalty?: number | undefined;
	max_completion_tokens?: number | undefined;
	presence_penalty?: number | undefined;
	temperature?: number | undefined;
	top_p?: number | undefined;
}

/** One turn of the conversation sent to the model. */
interface ChatMessage {
	role: 'user' | 'assistant' | 'system';
	content: string;
}

/** Response body for a (non-streaming) chat completion. */
interface ChatCompletionOutput {
	object: 'chat.completion';
	model: string;
	id: string;
	created: number;
	system_fingerprint: string;
	choices: ChatCompletionChoice[];
	usage: {
		prompt_tokens: number;
		completion_tokens: number;
		total_tokens: number;
	};
}

/** A single candidate completion within a response. */
interface ChatCompletionChoice {
	message: {
		role: 'assistant';
		content: string;
	};
	index: number;
	finish_reason: 'length' | 'stop' | 'content_filter';
}

/** Per-request options: cancellation and an injectable fetch (tests/SSR). */
interface RequestOptions {
	signal?: AbortSignal;
	fetch?: typeof fetch;
}

/**
 * Minimal client for OpenAI-compatible HTTP APIs.
 * Covers only the two endpoints this app uses: model listing and
 * non-streaming chat completions.
 */
export class OpenAIClient {
	private readonly baseUrl: string;
	private readonly headers: Headers;

	/**
	 * @param baseUrl Root URL of the API (e.g. `https://ai.agx.app/`); a
	 *                trailing slash is stripped so paths append cleanly.
	 */
	constructor(baseUrl: string) {
		this.baseUrl = baseUrl.replace(/\/$/, ''); // Remove trailing slash if present
		this.headers = new Headers({ 'Content-Type': 'application/json' });
	}

	/**
	 * Fetches the available models from `GET {baseUrl}/v1/models`.
	 * @throws Error when the server responds with a non-2xx status.
	 */
	async listModels(options: RequestOptions = {}) {
		// Read the injected fetch without mutating the caller's options object.
		const fetchFn = options.fetch ?? fetch;
		const response = await fetchFn(`${this.baseUrl}/v1/models`, {
			headers: this.headers,
			signal: options.signal
		});
		if (!response.ok) throw new Error(`HTTP error! status: ${response.status}`);
		const data = (await response.json()) as ModelsResponse;
		return data.data;
	}

	/**
	 * Sends a chat completion request to `POST {baseUrl}/v1/chat/completions`.
	 * @throws Error when the server responds with a non-2xx status.
	 */
	async createChatCompletion(input: ChatCompletionInput, options: RequestOptions = {}) {
		// Fix: previously called the global `fetch` directly, silently
		// ignoring an injected `options.fetch` (listModels honored it).
		const fetchFn = options.fetch ?? fetch;
		const response = await fetchFn(`${this.baseUrl}/v1/chat/completions`, {
			method: 'POST',
			headers: this.headers,
			body: JSON.stringify(input),
			signal: options.signal
		});
		if (!response.ok) throw new Error(`HTTP error! status: ${response.status}`);
		const data = (await response.json()) as ChatCompletionOutput;
		return data;
	}
}

View File

@@ -60,20 +60,20 @@
<button class="add-chat" onclick={() => add('New Chat')}><Plus size="14" /></button>
</div>
</nav>
<div>
{#if current}
{#each chats as chat (chat.id)}
<div style:display={chat.id === current?.id ? '' : 'none'}>
<ChatComponent
bind:dataset={current.dataset}
bind:dataset={chat.dataset}
{datasets}
bind:messages={current.messages}
onClearConversation={() => (current.messages = [])}
bind:messages={chat.messages}
onClearConversation={() => (chat.messages = [])}
{onOpenInEditor}
{models}
{selectedModel}
{onModelChange}
/>
{/if}
</div>
</div>
{/each}
</div>
<style>

View File

@@ -1,6 +1,7 @@
import type { Table } from '$lib/olap-engine';
import type { ChatInput, Model } from './types';
export { OpenAIClient } from './OpenAI';
export { default as AiPanel } from './Panel.svelte';
export interface Chat {
@@ -15,7 +16,7 @@ export type { Model };
export const ArgnosticModel: Model = {
brand: 'Agnostic',
name: 'Agnostic AI (v0)',
endpoint: 'https://ai.agx.app/api/chat'
baseURL: 'https://ai.agx.app/'
};
export function serializeModel(model: Model) {

View File

@@ -1,6 +1,6 @@
export interface ChatInput {
messages: {
role: 'user' | 'assistant' | 'system' | 'tool';
role: 'user' | 'assistant' | 'system';
content: string;
}[];
stream?: false | undefined;
@@ -14,5 +14,5 @@ export interface ChatOutput {
export interface Model {
name: string;
brand: string;
endpoint: string;
baseURL: string;
}

View File

@@ -2,7 +2,6 @@ import type { Model } from '$lib/components/Ai';
import { OllamaClient } from './client';
const ollama = new OllamaClient(OLLAMA_BASE_URL);
const OLLAMA_CHAT_API_ENDPOINT = `${OLLAMA_BASE_URL}/api/chat`;
export async function isInstalled() {
try {
@@ -15,5 +14,5 @@ export async function isInstalled() {
export async function getModels(): Promise<Model[]> {
const models = await ollama.listModels();
return models.map((m) => ({ brand: 'Ollama', name: m.name, endpoint: OLLAMA_CHAT_API_ENDPOINT }));
return models.map((m) => ({ brand: 'Ollama', name: m.name, baseURL: OLLAMA_BASE_URL }));
}

View File

@@ -384,7 +384,7 @@ LIMIT 100;`;
if (!stored) return fallback;
return (
data.models.find(
(m) => m.name === stored.name && m.brand === stored.brand && m.endpoint === stored.endpoint
(m) => m.name === stored.name && m.brand === stored.brand && m.baseURL === stored.baseURL
) ?? fallback
);
});