Chat History and multi model
parent a9ffb48b4b
commit 44f391ef1e
13 changed files with 1072 additions and 839 deletions

app.py (9 changes)
@@ -1,13 +1,13 @@
 from starlette.applications import Starlette
 from starlette.routing import Route
-from controllers import create_chat, post_message, stream_response
+from controllers import create_chat, post_message, chat_stream, history
 from starlette.middleware import Middleware
 from starlette.middleware.cors import CORSMiddleware

 middleware = [
     Middleware(
         CORSMiddleware,
-        allow_origins=["*"], # change to ["http://localhost:3000"] etc. in prod
+        allow_origins=["*"],
         allow_credentials=True,
         allow_methods=["*"],
         allow_headers=["*"],
@@ -16,8 +16,9 @@ middleware = [

 routes = [
     Route("/chats", create_chat, methods=["POST"]),
+    Route("/chats/{chat_id:str}", history, methods=["GET"]),
     Route("/chats/{chat_id:str}/messages", post_message, methods=["POST"]),
-    Route("/chats/{chat_id:str}/stream", stream_response, methods=["GET"]),
+    Route("/chats/{chat_id:str}/stream", chat_stream, methods=["GET"]),
 ]

@@ -26,4 +27,4 @@ application = Starlette(debug=True, routes=routes, middleware=middleware)
 # ----------------- Run -----------------
 if __name__ == "__main__":
     import uvicorn
-    uvicorn.run(application, host="0.0.0.0", port=8000)
+    uvicorn.run("app:application", host="0.0.0.0", port=8000, reload=True)

chatgraph.py (28 changes)
@@ -1,29 +1,17 @@
-from typing import TypedDict
-from langgraph.graph import StateGraph, START
-from langgraph.checkpoint.memory import MemorySaver
 from langchain_openai import ChatOpenAI
+from langchain_core.messages import HumanMessage, AIMessage
 from os import getenv
 from dotenv import load_dotenv

 load_dotenv()

-class State(TypedDict):
-    messages: list
-
-llm = ChatOpenAI(
-    openai_api_key=getenv("OPENROUTER_API_KEY"),
-    openai_api_base=getenv("OPENROUTER_BASE_URL"),
-    model_name="qwen/qwen3-235b-a22b-07-25",
-)
-
-def call_model(state: State):
-    # simple memory trim example: keep last 20 messages
-    trimmed = state["messages"][-20:]
-    response = llm.invoke(trimmed)
-    return {"messages": [response]}
-
-workflow = StateGraph(State)
-workflow.add_node("model", call_model)
-workflow.add_edge(START, "model")
-memory = MemorySaver()
-app_graph = workflow.compile(checkpointer=memory)
+def get_llm(provider: str):
+    """Return a LangChain chat model for the requested provider."""
+    return ChatOpenAI(
+        openai_api_key=getenv("OPENROUTER_API_KEY"),
+        openai_api_base=getenv("OPENROUTER_BASE_URL"),
+        model_name=provider,
+    )
+
+def get_messages(chats, chat_id):
+    return [HumanMessage(**m) if m["role"] == "human" else AIMessage(**m) for m in chats[chat_id]]
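For context, a minimal usage sketch of the two new helpers (illustration only, not part of the commit): it assumes the same role/content dicts that controllers.py keeps in its CHATS map, rebuilds LangChain message objects with get_messages, and streams a reply from whatever OpenRouter model name is passed to get_llm.

# Hypothetical usage sketch of the new chatgraph helpers (not part of the commit).
import asyncio
from chatgraph import get_llm, get_messages

# Same shape as the CHATS store in controllers.py: chat_id -> list of {"role", "content"} dicts.
chats = {"abc12345": [{"role": "human", "content": "Hello there!"}]}

async def demo():
    msgs = get_messages(chats, "abc12345")       # rebuild HumanMessage/AIMessage objects
    llm = get_llm("qwen/qwen3-235b-a22b-2507")   # any model name the backend accepts
    async for chunk in llm.astream(msgs):        # stream the reply token by token
        print(chunk.content, end="", flush=True)

asyncio.run(demo())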

chatsbt/src/App.svelte
@@ -1,5 +1,16 @@
 <script>
-  import Chat from './lib/Chat.svelte';
+  import Chat from "./lib/Chat.svelte";
+  import ChatList from "./lib/ChatList.svelte";
 </script>

-<Chat />
+<div class="drawer lg:drawer-open">
+  <input id="drawer-toggle" type="checkbox" class="drawer-toggle" />
+  <div class="drawer-content flex flex-col h-screen">
+    <Chat />
+  </div>
+
+  <div class="drawer-side">
+    <label for="drawer-toggle" class="drawer-overlay"></label>
+    <ChatList />
+  </div>
+</div>

chatsbt/src/lib/Chat.svelte
@@ -1,20 +1,17 @@
 <script>
-  import ChatMessage from './ChatMessage.svelte';
-  import { chatStore } from './chatStore.svelte';
-
+  import ChatMessage from "./ChatMessage.svelte";
+  import { chatStore } from "./chatStore.svelte.js";
 </script>

 <div class="flex flex-col max-w-4/5 h-screen mx-auto p-5">
-  <!-- header -->
-  <header class="p-4">
-    <h1 class="text-5xl font-bold">Simple AI Chat</h1>
-  </header>
-
-  <!-- messages -->
-  <main class="flex-1 p-4 space-y-3 overflow-y-auto">
+  <!-- header -->
+  <header class="p-4">
+    <h1 class="text-5xl font-bold">Multi AI Chat</h1>
+  </header>
+
+  <!-- messages -->
+  <main class="flex-1 p-4 space-y-3 overflow-y-auto">
     {#each chatStore.messages as m (m.id)}
-      <ChatMessage {m} />
+      <ChatMessage message={m} />
     {/each}

     {#if chatStore.loading}
@@ -22,22 +19,38 @@
         <div class="chat-bubble chat-bubble-secondary loading"></div>
       </div>
     {/if}
-  </main>
-
-  <!-- input -->
-  <footer class="p-2 bg-base-200">
-    <div class="join w-full p-1">
+  </main>
+
+  <!-- input -->
+  <footer class="bg-neutral-content rounded-xl">
+    <div class="flex items-center">
+      <div class="form-control flex-1 m-4">
         <textarea
-          class="textarea textarea-bordered join-item flex-1 m-1"
+          class="textarea w-full"
          rows="1"
          placeholder="Type something…"
          bind:value={chatStore.input}
          onkeydown={chatStore.handleKey}
          disabled={chatStore.loading}
        ></textarea>
+      </div>
+    </div>
+    <div class="flex items-center m-4">
+      <select
+        class="select select-bordered join-item"
+        bind:value={chatStore.model}
+        disabled={chatStore.loading}
+      >
+        <option value="qwen/qwen3-235b-a22b-2507"
+          >qwen/qwen3-235b-a22b-2507</option
+        >
+        <option value="deepseek/deepseek-r1-0528"
+          >deepseek/deepseek-r1-0528</option
+        >
+        <option value="moonshotai/kimi-k2">moonshotai/kimi-k2</option>
+      </select>
       <button
-        class="btn btn-primary join-item m-2"
+        class="btn btn-primary ml-auto"
        onclick={chatStore.send}
        disabled={!chatStore.input.trim()}
      >
@@ -46,5 +59,4 @@
        {:else}Send{/if}
      </button>
-    </div>
-  </footer>
+  </div>
+</footer>
|

chatsbt/src/lib/ChatList.svelte (new file, 33 lines)
@@ -0,0 +1,33 @@
+<script>
+  import { chatStore } from "./chatStore.svelte.js";
+</script>
+
+<aside class="menu p-4 w-64 bg-base-200 min-h-full">
+  <div class="flex justify-between items-center mb-4">
+    <span class="text-lg font-bold">Chats</span>
+    <button
+      class="btn btn-xs btn-primary"
+      onclick={() =>
+        chatStore.selectChat(null) && chatStore.createAndSelect()}
+    >
+      New
+    </button>
+  </div>
+
+  <ul class="menu menu-compact">
+    {#each chatStore.history as c}
+      <li>
+        <a
+          href="/{c.id}"
+          class={chatStore.chatId === c.id ? "active" : ""}
+          onclick={(e) => {
+            e.preventDefault();
+            chatStore.selectChat(c.id);
+          }}
+        >
+          {c.title}
+        </a>
+      </li>
+    {/each}
+  </ul>
+</aside>
|
chatsbt/src/lib/ChatMessage.svelte
@@ -1,23 +1,29 @@
 <script>
-  import { marked } from 'marked';
-  export let m; // { text, me, sender }
+  import { marked } from "marked";
+  let { message } = $props(); // { id, role, text }
+
+  const text = $derived(message.text);
+  const me = $derived(message.role == "user");

   /* optional: allow HTML inside the markdown (default is escaped) */
   marked.setOptions({ breaks: true, gfm: true });

 </script>

-{#if m.me}
+{#if me}
   <div class="chat chat-end">
     <div class="chat-bele chat-bubble chat-bubble-primary">
-      {m.text}
+      {text}
     </div>
   </div>
 {:else}
   <div class="chat chat-start">
-    <div class="chat-bele chat-bubble {m.sender === 'error' ? 'text-error' : ''} prose max-w-none" >
+    <div
+      class="chat-bele chat-bubble {message.role === 'error'
+        ? 'text-error'
+        : ''} prose max-w-none"
+    >
       <!-- eslint-disable svelte/no-at-html-tags -->
-      {@html marked(m.text)}
+      {@html marked(text)}
     </div>
   </div>
 {/if}

chatsbt/src/lib/chatApi.svelte.js (new file, 24 lines)
@@ -0,0 +1,24 @@
+const API = "http://localhost:8000"; // change if needed
+
+export async function createChat(model = "qwen/qwen3-235b-a22b-2507") {
+  const r = await fetch(`${API}/chats`, {
+    method: "POST",
+    body: JSON.stringify({ model }),
+  });
+  return r.json(); // { chat_id }
+}
+
+export async function sendUserMessage(chatId, text, model = "") {
+  const r = await fetch(`${API}/chats/${chatId}/messages`, {
+    method: "POST",
+    headers: { "Content-Type": "application/json" },
+    body: JSON.stringify({ message: text, model }),
+  });
+  return r.json(); // { message_id }
+}
+
+export function openStream(chatId, messageId) {
+  return new EventSource(
+    `${API}/chats/${chatId}/stream?message_id=${messageId}`,
+  );
+}
|
chatsbt/src/lib/chatStore.svelte.js
@@ -1,86 +1,130 @@
-// Pure vanilla JS – no Svelte imports
-const BASE = import.meta.env.VITE_API_URL ?? 'http://localhost:8000';
+import { createChat, sendUserMessage, openStream } from "./chatApi.svelte.js";
+
+const STORAGE_KEY = "chatHistory";
+
+function loadHistory() {
+  try {
+    return JSON.parse(localStorage.getItem(STORAGE_KEY) || "[]");
+  } catch {
+    return [];
+  }
+}
+
+function saveHistory(list) {
+  localStorage.setItem(STORAGE_KEY, JSON.stringify(list));
+}

 export const chatStore = (() => {
-  let chatId = $state(null); // null until first message
+  let chatId = $state(null);
   let messages = $state([]);
-  let input = $state('');
   let loading = $state(false);
+  let input = $state("");
+  let model = $state("qwen/qwen3-235b-a22b-2507");

-  /* ── helpers ── */
-  async function createChat() {
-    const res = await fetch(`${BASE}/chats`, { method: 'POST' });
-    const { id } = await res.json();
+  // public helpers
+  const history = $derived(loadHistory());
+
+  function pushHistory(id, title, msgs) {
+    console.log(`push history: ${id} - ${title}`);
+    const h = history.filter((c) => c.id !== id);
+    h.unshift({ id, title, messages: msgs });
+    saveHistory(h.slice(0, 50)); // keep last 50
+  }
+
+  async function selectChat(id) {
+    if (id === chatId) return;
+    chatId = id;
+    const stored = loadHistory().find((c) => c.id === id);
+    messages = stored?.messages || [];
+    loading = true;
+    loading = false;
+    window.history.replaceState({}, "", `/${id}`);
+  }
+
+  async function createAndSelect() {
+    const { id } = await createChat(model);
+    console.log(id);
+    selectChat(id);
+    return id;
+  }

-  async function sendUserMessage(text) {
-    await fetch(`${BASE}/chats/${chatId}/messages`, {
-      method: 'POST',
-      headers: { 'Content-Type': 'application/json' },
-      body: JSON.stringify({ message: text })
+  async function send() {
+    if (!input.trim()) return;
+    if (!chatId) await createAndSelect();
+
+    const userMsg = { id: crypto.randomUUID(), role: "user", text: input };
+    messages = [...messages, userMsg];
+
+    pushHistory(chatId, userMsg.text.slice(0, 30), messages);
+
+    loading = true;
+    const { message_id } = await sendUserMessage(chatId, input, model);
+    input = "";
+
+    let assistantMsg = { id: message_id, role: "assistant", text: "" };
+    messages = [...messages, assistantMsg];
+
+    const es = openStream(chatId, message_id);
+    es.onmessage = (e) => {
+      assistantMsg = { ...assistantMsg, text: assistantMsg.text + e.data };
+      messages = [...messages.slice(0, -1), assistantMsg];
+    };
+    es.onerror = () => {
+      es.close();
+      loading = false;
+    };
+    es.addEventListener("done", (e) => {
+      console.log(e);
+      es.close();
+      loading = false;
+      pushHistory(chatId, userMsg.text.slice(0, 30), messages);
     });
   }

-  function streamAssistantReply() {
-    const source = new EventSource(`${BASE}/chats/${chatId}/stream`);
-    let botMsg = { id: Date.now(), text: '', me: false, sender: 'bot' };
-    messages = [...messages, botMsg];
-
-    source.onmessage = (ev) => {
-      console.log(ev.data);
-      if (ev.data === '[DONE]') {
-        source.close();
-        loading = false;
-        return;
-      }
-
-      messages = messages.map((m, i) =>
-        i === messages.length - 1 ? { ...m, text: m.text + ev.data } : m
-      );
-    };
-
-    source.onerror = () => {
-      source.close();
-      loading = false;
-    };
-  }
-
-  async function send() {
-    const text = input.trim();
-    if (!text || loading) return;
-
-    // add user bubble immediately
-    messages = [...messages, { id: Date.now(), text, me: true, sender: 'user' }];
-    input = '';
-    loading = true;
-
-    try {
-      if (!chatId) chatId = await createChat();
-      await sendUserMessage(text);
-      streamAssistantReply();
-    } catch {
-      messages = [
-        ...messages,
-        { id: Date.now(), text: 'Sorry, something went wrong.', me: false, sender: 'bot' }
-      ];
-      loading = false;
-    }
-  }
-
   function handleKey(e) {
-    if (e.key === 'Enter' && !e.shiftKey) {
+    if (e.key === "Enter" && !e.shiftKey) {
       e.preventDefault();
       send();
     }
   }

+  // initial route handling
+  const path = window.location.pathname.slice(1);
+  const storedHistory = loadHistory();
+  if (path && !storedHistory.find((c) => c.id === path)) {
+    createAndSelect();
+  } else if (path) {
+    selectChat(path);
+  }
+
   return {
-    get messages() { return messages; },
-    get input() { return input; },
-    set input(v) { input = v; },
-    get loading() { return loading; },
+    get chatId() {
+      return chatId;
+    },
+    get messages() {
+      return messages;
+    },
+    get loading() {
+      return loading;
+    },
+    get input() {
+      return input;
+    },
+    set input(v) {
+      input = v;
+    },
+    get model() {
+      return model;
+    },
+    set model(v) {
+      model = v;
+    },
+    get history() {
+      return loadHistory();
+    },
+    selectChat,
     send,
-    handleKey
+    handleKey,
+    createAndSelect,
   };
 })();

chatsbt/src/lib/router.svelte.js (new file, 31 lines)
@@ -0,0 +1,31 @@
+import { chatStore } from "./chatStore.svelte.js";
+
+// keyed by chat_id → chatStore instance
+const cache = $state({});
+
+// which chat is on screen right now
+export const activeChatId = $state(null);
+
+export function getStore(chatId) {
+  if (!cache[chatId]) {
+    cache[chatId] = chatStore(chatId);
+  }
+  return cache[chatId];
+}
+
+export function switchChat(chatId) {
+  activeChatId = chatId;
+}
+
+export function newChat() {
+  const id = "chat_" + crypto.randomUUID();
+  switchChat(id);
+  return id;
+}
+
+// restore last opened chat (or create first one)
+(() => {
+  const ids = JSON.parse(localStorage.getItem("chat_ids") || "[]");
+  if (ids.length) switchChat(ids[0]);
+  else newChat();
+})();
|
chatsbt/svelte.config.js
@@ -1,7 +1,8 @@
-import { vitePreprocess } from '@sveltejs/vite-plugin-svelte'
+import { vitePreprocess } from "@sveltejs/vite-plugin-svelte";

 export default {
   // Consult https://svelte.dev/docs#compile-time-svelte-preprocess
   // for more information about preprocessors
   preprocess: vitePreprocess(),
-}
+  dev: true,
+};
|
controllers.py
@@ -1,46 +1,83 @@
 import asyncio
 import uuid
+from typing import Dict, List, Tuple
 from starlette.responses import JSONResponse
+from starlette.requests import Request
 from sse_starlette.sse import EventSourceResponse
-from pydantic import BaseModel
-from langchain_core.messages import HumanMessage, AIMessage
-from chatgraph import app_graph
-from collections import defaultdict
+from chatgraph import get_messages, get_llm

-pending = defaultdict(asyncio.Queue)
-
-class ChatIn(BaseModel):
-    message: str
+CHATS: Dict[str, List[dict]] = {}  # chat_id -> messages
+PENDING: Dict[str, Tuple[str, str]] = {}  # message_id -> (chat_id, provider)

-async def create_chat(request):
-    """POST /chats -> returns {id: <new_chat_id>}"""
-    chat_id = str(uuid.uuid4())[:8]
-    return JSONResponse({"id": chat_id})
+MODELS = {
+    "qwen/qwen3-235b-a22b-2507",
+    "deepseek/deepseek-r1-0528",
+    "moonshotai/kimi-k2",
+}

-async def post_message(request):
-    """POST /chats/{chat_id}/messages"""
-    chat_id = request.path_params["chat_id"]
-    body = await request.json()
-    msg = ChatIn(**body).message
-    await pending[chat_id].put(msg)
-    return JSONResponse({"status": "queued"})
+async def create_chat(request: Request):
+    """POST /chats -> {chat_id, model}"""
+    body = await request.json()
+    provider = body.get("model", "")
+    if provider not in MODELS:
+        return JSONResponse({"error": "Unknown model"}, status_code=400)
+    chat_id = str(uuid.uuid4())[:8]
+    CHATS[chat_id] = []
+    return JSONResponse({"id": chat_id, "model": provider})

-async def stream_response(request):
-    """GET /chats/{chat_id}/stream (SSE)"""
-    chat_id = request.path_params["chat_id"]
-    user_msg = await pending[chat_id].get()
-
-    config = {"configurable": {"thread_id": chat_id}}
-    input_messages = [HumanMessage(content=user_msg)]
+async def history(request: Request):
+    """GET /chats/{chat_id} -> previous messages"""
+    chat_id = request.path_params["chat_id"]
+    if chat_id not in CHATS:
+        return JSONResponse({"error": "Not found"}, status_code=404)
+    return JSONResponse({"messages": CHATS[chat_id]})
+
+async def post_message(request: Request):
+    """POST /chats/{chat_id}/messages
+    Body: {"message": "...", "model": "model_name"}
+    Returns: {"message_id": "<chat_id>"}
+    """
+    chat_id = request.path_params["chat_id"]
+    if chat_id not in CHATS:
+        return JSONResponse({"error": "Chat not found"}, status_code=404)
+
+    body = await request.json()
+    user_text = body.get("message", "")
+    provider = body.get("model", "")
+    if provider not in MODELS:
+        return JSONResponse({"error": "Unknown model"}, status_code=400)
+
+    message_id = str(uuid.uuid4())
+    PENDING[message_id] = (chat_id, provider)
+    CHATS[chat_id].append({"role": "human", "content": user_text})
+
+    return JSONResponse({
+        "status": "queued",
+        "message_id": message_id
+    })
+
+async def chat_stream(request):
+    """GET /chats/{chat_id}/stream?message_id=<chat_id>"""
+    chat_id = request.path_params["chat_id"]
+    message_id = request.query_params.get("message_id")
+
+    if chat_id not in CHATS or message_id not in PENDING:
+        return JSONResponse({"error": "Not found"}, status_code=404)
+
+    chat_id_from_map, provider = PENDING.pop(message_id)
+    assert chat_id == chat_id_from_map
+
+    msgs = get_messages(CHATS, chat_id)
+    llm = get_llm(provider)

     async def event_generator():
-        async for chunk, _ in app_graph.astream(
-            {"messages": input_messages},
-            config,
-            stream_mode="messages",
-        ):
-            if isinstance(chunk, AIMessage):
-                yield dict(data=chunk.content)
+        buffer = ""
+        async for chunk in llm.astream(msgs):
+            token = chunk.content
+            buffer += token
+            yield {"data": token}
+        # Finished: store assistant reply
+        CHATS[chat_id].append({"role": "assistant", "content": buffer})
+        yield {"event": "done", "data": ""}

     return EventSourceResponse(event_generator())
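Taken together, the docstrings above define the request flow: POST /chats creates a chat, POST /chats/{chat_id}/messages queues a message, and GET /chats/{chat_id}/stream replays the reply over SSE, ending with a "done" event. A minimal client sketch of that flow (illustration only, not part of the commit; it assumes the server is running on localhost:8000 and uses the requests library instead of the app's Svelte client):

# Hypothetical end-to-end client for the endpoints above (not part of the commit).
import json
import requests

BASE = "http://localhost:8000"
MODEL = "qwen/qwen3-235b-a22b-2507"

# 1. Create a chat for the chosen model -> {"id": ..., "model": ...}
chat = requests.post(f"{BASE}/chats", json={"model": MODEL}).json()

# 2. Queue a user message -> {"status": "queued", "message_id": ...}
queued = requests.post(
    f"{BASE}/chats/{chat['id']}/messages",
    json={"message": "Hello!", "model": MODEL},
).json()

# 3. Read the SSE stream until the server sends the "done" event.
with requests.get(
    f"{BASE}/chats/{chat['id']}/stream",
    params={"message_id": queued["message_id"]},
    stream=True,
) as resp:
    for line in resp.iter_lines(decode_unicode=True):
        if line.startswith("event: done"):
            break
        if line.startswith("data: "):
            print(line[len("data: "):], end="", flush=True)

# 4. The assistant reply is now also stored server-side.
history = requests.get(f"{BASE}/chats/{chat['id']}").json()
print(json.dumps(history, indent=2))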
|
pyproject.toml
@@ -15,4 +15,6 @@ dependencies = [
     "sse-starlette>=2.4.1",
     "langchain-openai>=0.3.28",
     "langgraph>=0.5.4",
+    "langgraph-checkpoint-sqlite>=2.0.11",
+    "aiosqlite>=0.21.0",
 ]