Multi-model support fetched from the API

This commit is contained in:
Sebarocks 2025-07-31 15:59:19 -04:00
parent 15d74420f7
commit 0a894237bb
5 changed files with 62 additions and 12 deletions

3
app.py
View file

@ -1,6 +1,6 @@
from starlette.applications import Starlette from starlette.applications import Starlette
from starlette.routing import Route from starlette.routing import Route
from controllers import create_chat, post_message, chat_stream, history from controllers import create_chat, post_message, chat_stream, history, get_models
from starlette.middleware import Middleware from starlette.middleware import Middleware
from starlette.middleware.cors import CORSMiddleware from starlette.middleware.cors import CORSMiddleware
@ -15,6 +15,7 @@ middleware = [
] ]
routes = [ routes = [
Route("/models", get_models, methods=["GET"]),
Route("/chats", create_chat, methods=["POST"]), Route("/chats", create_chat, methods=["POST"]),
Route("/chats/{chat_id:str}", history, methods=["GET"]), Route("/chats/{chat_id:str}", history, methods=["GET"]),
Route("/chats/{chat_id:str}/messages", post_message, methods=["POST"]), Route("/chats/{chat_id:str}/messages", post_message, methods=["POST"]),

View file

@ -39,20 +39,24 @@
<select <select
class="select select-bordered join-item" class="select select-bordered join-item"
bind:value={chatStore.model} bind:value={chatStore.model}
disabled={chatStore.loading} disabled={chatStore.loading || chatStore.loadingModels}
> >
<option value="qwen/qwen3-235b-a22b-2507" {#if chatStore.loadingModels}
>qwen/qwen3-235b-a22b-2507</option <option value="" disabled>Loading models...</option>
> {:else if chatStore.models.length === 0}
<option value="deepseek/deepseek-r1-0528" <option value="" disabled>No model available</option>
>deepseek/deepseek-r1-0528</option {:else}
> {#each chatStore.models as modelOption}
<option value="moonshotai/kimi-k2">moonshotai/kimi-k2</option> <option value={modelOption.id || modelOption}>
{modelOption.name || modelOption.id || modelOption}
</option>
{/each}
{/if}
</select> </select>
<button <button
class="btn btn-primary ml-auto" class="btn btn-primary ml-auto"
onclick={chatStore.send} onclick={chatStore.send}
disabled={!chatStore.input.trim()} disabled={!chatStore.input.trim() || chatStore.models.length === 0}
> >
{#if chatStore.loading} {#if chatStore.loading}
<span class="loading loading-spinner loading-xs"></span> <span class="loading loading-spinner loading-xs"></span>

View file

@ -22,3 +22,14 @@ export function openStream(chatId, messageId) {
`${API}/chats/${chatId}/stream?message_id=${messageId}`, `${API}/chats/${chatId}/stream?message_id=${messageId}`,
); );
} }
/**
 * Fetch the list of available models from the backend.
 *
 * Calls GET `${API}/models` and returns the `models` array from the
 * JSON payload. Any failure (network error, non-2xx status, malformed
 * JSON, missing `models` key) is logged and degrades to an empty
 * array, so callers never have to handle a rejection.
 *
 * @returns {Promise<Array>} the models list, or [] on any failure
 */
export async function fetchModels() {
  try {
    const response = await fetch(`${API}/models`);
    // Treat HTTP errors (4xx/5xx) as failures instead of attempting
    // to parse an error body as a model list.
    if (!response.ok) {
      throw new Error(`HTTP ${response.status}`);
    }
    const data = await response.json();
    return data.models || [];
  } catch (error) {
    console.error('Failed to fetch models:', error);
    return [];
  }
}

View file

@ -1,4 +1,4 @@
import { createChat, sendUserMessage, openStream } from "./chatApi.svelte.js"; import { createChat, sendUserMessage, openStream, fetchModels } from "./chatApi.svelte.js";
const STORAGE_KEY = "chatHistory"; const STORAGE_KEY = "chatHistory";
@ -19,7 +19,9 @@ export const chatStore = (() => {
let messages = $state([]); let messages = $state([]);
let loading = $state(false); let loading = $state(false);
let input = $state(""); let input = $state("");
let model = $state("qwen/qwen3-235b-a22b-2507"); let model = $state("qwen/qwen3-235b-a22b-2507"); // default
let models = $state([]);
let loadingModels = $state(true);
// public helpers // public helpers
const history = $derived(loadHistory()); const history = $derived(loadHistory());
@ -81,6 +83,17 @@ export const chatStore = (() => {
}); });
} }
// Load the available models from the API and reconcile the selected
// model with the result. `loadingModels` is always cleared via
// `finally`, even if an unexpected error escapes fetchModels().
async function loadModels() {
  loadingModels = true;
  try {
    models = await fetchModels();
    if (models.length > 0) {
      // Entries may be either {id, name} objects or plain id strings.
      const ids = models.map((m) => m.id || m);
      // Fall back to the first fetched model when nothing is selected
      // OR when the current (hard-coded default) model is not offered
      // by the API — otherwise the <select> would show no valid option.
      if (!model || !ids.includes(model)) {
        model = ids[0];
      }
    }
  } finally {
    loadingModels = false;
  }
}
function handleKey(e) { function handleKey(e) {
if (e.key === "Enter" && !e.shiftKey) { if (e.key === "Enter" && !e.shiftKey) {
e.preventDefault(); e.preventDefault();
@ -97,6 +110,9 @@ export const chatStore = (() => {
selectChat(path); selectChat(path);
} }
// Load models on initialization
loadModels();
return { return {
get chatId() { get chatId() {
return chatId; return chatId;
@ -119,6 +135,12 @@ export const chatStore = (() => {
set model(v) { set model(v) {
model = v; model = v;
}, },
get models() {
return models;
},
get loadingModels() {
return loadingModels;
},
get history() { get history() {
return loadHistory(); return loadHistory();
}, },
@ -126,5 +148,6 @@ export const chatStore = (() => {
send, send,
handleKey, handleKey,
createAndSelect, createAndSelect,
loadModels,
}; };
})(); })();

View file

@ -13,8 +13,19 @@ MODELS = {
"qwen/qwen3-235b-a22b-2507", "qwen/qwen3-235b-a22b-2507",
"deepseek/deepseek-r1-0528", "deepseek/deepseek-r1-0528",
"moonshotai/kimi-k2", "moonshotai/kimi-k2",
"x-ai/grok-4",
"openai/gpt-4.1",
"anthropic/claude-sonnet-4",
"meta-llama/llama-4-maverick",
"mistralai/devstral-medium",
"qwen/qwen3-coder",
"google/gemini-2.5-pro",
} }
async def get_models(request: Request):
    """GET /models -> {"models": [...]}.

    Return the identifiers of every chat model this server accepts.

    MODELS is a set, so its iteration order is arbitrary per process;
    sorting here gives the endpoint a stable, deterministic response
    across restarts and workers.
    """
    return JSONResponse({"models": sorted(MODELS)})
async def create_chat(request: Request): async def create_chat(request: Request):
"""POST /chats -> {chat_id, model}""" """POST /chats -> {chat_id, model}"""
body = await request.json() body = await request.json()