database support

This commit is contained in:
Sebarocks 2025-08-04 14:05:54 -04:00
parent f7b23a3cec
commit c49636c766
10 changed files with 780 additions and 860 deletions

5
.gitignore vendored
View file

@ -9,4 +9,7 @@ wheels/
# Virtual environments
.venv
.python-version
.env
.env
# Databases
*.sqlite3

View file

@ -1,82 +0,0 @@
# ChatSBT - Multi-Model Chat Application
A modern chat application supporting multiple AI models through the OpenRouter API.
## Features
- Chat with multiple AI models (Qwen, Deepseek, Kimi)
- Real-time streaming responses
- Conversation history
- Simple REST API backend
- Modern Svelte frontend
## Tech Stack
### Frontend
- Svelte
- DaisyUI (Tailwind component library)
- Vite
### Backend
- Starlette (async Python web framework)
- LangChain (LLM orchestration)
- LangGraph (for potential future agent workflows)
- OpenRouter API (multi-model provider)
## API Endpoints
| Method | Path | Description |
|--------|------|-------------|
| POST | /chats | Create new chat session |
| GET | /chats/{chat_id} | Get chat history |
| POST | /chats/{chat_id}/messages | Post new message |
| GET | /chats/{chat_id}/stream | Stream response from AI |
## Prerequisites
- Python 3.11+
- Deno
- UV (Python package manager)
- OpenRouter API key (set in `.env` file)
## Installation
1. Clone the repository
2. Set up environment variables:
```bash
echo "OPENROUTER_API_KEY=your_key_here" > .env
echo "OPENROUTER_BASE_URL=https://openrouter.ai/api/v1" >> .env
```
3. Install frontend dependencies:
```bash
cd chatsbt
deno install
```
## Running
1. Start backend server:
```bash
uv run app.py
```
2. Start the frontend (another terminal):
```bash
cd chatsbt
deno run dev
```
The application will be available at `http://localhost:5173`
## Configuration
Available models:
- `qwen/qwen3-235b-a22b-2507`
- `deepseek/deepseek-r1-0528`
- `moonshotai/kimi-k2`

View file

@ -15,4 +15,5 @@ def get_llm(provider: str):
)
def get_messages(chats, chat_id):
return [HumanMessage(**m) if m["role"] == "human" else AIMessage(**m) for m in chats[chat_id]]
print(chats)
return [HumanMessage(**m) if m["role"] == "human" else AIMessage(**m) for m in chats]

1
config/__init__.py Normal file
View file

@ -0,0 +1 @@
# Masonite-orm module

11
config/database.py Normal file
View file

@ -0,0 +1,11 @@
from masoniteorm.connections import ConnectionResolver

# Connection details for the bundled SQLite database file.
_SQLITE = {
    "driver": "sqlite",
    "database": "database.sqlite3",
}

# Masonite ORM connection registry; the "default" key names the
# connection every model uses unless it overrides __connection__.
DATABASES = {
    "default": "sqlite",
    "sqlite": _SQLITE,
}

# Module-level resolver shared by all models in the application.
DB = ConnectionResolver().set_connection_details(DATABASES)

View file

@ -1,12 +1,13 @@
import uuid
import json
from typing import Dict, List, Tuple
from starlette.responses import JSONResponse
from starlette.requests import Request
from sse_starlette.sse import EventSourceResponse
from chatgraph import get_messages, get_llm
from models.Chat import Chat
CHATS: Dict[str, List[dict]] = {} # chat_id -> messages
PENDING: Dict[str, Tuple[str, str]] = {} # message_id -> (chat_id, provider)
MODELS = {
@ -32,16 +33,22 @@ async def create_chat(request: Request):
provider = body.get("model","")
if provider not in MODELS:
return JSONResponse({"error": "Unknown model"}, status_code=400)
chat_id = str(uuid.uuid4())[:8]
CHATS[chat_id] = []
chat = Chat()
chat_id = str(uuid.uuid4())
chat.id = chat_id
chat.title = "New Chat"
chat.messages = json.dumps([])
chat.save()
return JSONResponse({"id": chat_id, "model": provider})
async def history(request : Request):
"""GET /chats/{chat_id} -> previous messages"""
chat_id = request.path_params["chat_id"]
if chat_id not in CHATS:
chat = Chat.find(chat_id)
if not chat:
return JSONResponse({"error": "Not found"}, status_code=404)
return JSONResponse({"messages": CHATS[chat_id]})
messages = json.loads(chat.messages) if chat.messages else []
return JSONResponse({"messages": messages})
async def post_message(request: Request):
"""POST /chats/{chat_id}/messages
@ -49,7 +56,8 @@ async def post_message(request: Request):
Returns: {"message_id": "<chat_id>"}
"""
chat_id = request.path_params["chat_id"]
if chat_id not in CHATS:
chat = Chat.find(chat_id)
if not chat:
return JSONResponse({"error": "Chat not found"}, status_code=404)
body = await request.json()
@ -58,9 +66,14 @@ async def post_message(request: Request):
if provider not in MODELS:
return JSONResponse({"error": "Unknown model"}, status_code=400)
# Load existing messages and add the new user message
messages = json.loads(chat.messages) if chat.messages else []
messages.append({"role": "human", "content": user_text})
chat.messages = json.dumps(messages)
chat.save()
message_id = str(uuid.uuid4())
PENDING[message_id] = (chat_id, provider)
CHATS[chat_id].append({"role": "human", "content": user_text})
return JSONResponse({
"status": "queued",
@ -72,13 +85,18 @@ async def chat_stream(request):
chat_id = request.path_params["chat_id"]
message_id = request.query_params.get("message_id")
if chat_id not in CHATS or message_id not in PENDING:
if message_id not in PENDING:
return JSONResponse({"error": "Not found"}, status_code=404)
chat_id_from_map, provider = PENDING.pop(message_id)
assert chat_id == chat_id_from_map
msgs = get_messages(CHATS, chat_id)
chat = Chat.find(chat_id)
if not chat:
return JSONResponse({"error": "Chat not found"}, status_code=404)
messages = json.loads(chat.messages) if chat.messages else []
msgs = get_messages( messages , chat_id)
llm = get_llm(provider)
async def event_generator():
@ -88,7 +106,10 @@ async def chat_stream(request):
buffer += token
yield {"data": token}
# Finished: store assistant reply
CHATS[chat_id].append({"role": "assistant", "content": buffer})
messages.append({"role": "assistant", "content": buffer})
chat.messages = json.dumps(messages)
chat.save()
yield {"event": "done", "data": ""}
return EventSourceResponse(event_generator())

View file

@ -0,0 +1,12 @@
from masoniteorm.migrations import Migration


class CreateChatsTable(Migration):
    """Schema migration that creates and drops the ``chats`` table."""

    def up(self):
        """Create ``chats`` with a UUID primary key, title, and JSON messages."""
        with self.schema.create("chats") as blueprint:
            blueprint.uuid("id").primary()
            blueprint.string("title")
            blueprint.json("messages")

    def down(self):
        """Reverse :meth:`up` by dropping the ``chats`` table."""
        self.schema.drop("chats")

10
models/Chat.py Normal file
View file

@ -0,0 +1,10 @@
from masoniteorm.models import Model
from masoniteorm.scopes import UUIDPrimaryKeyMixin


class Chat(Model, UUIDPrimaryKeyMixin):
    """ORM model for one chat session, persisted in the ``chats`` table.

    Each row holds a UUID string primary key, a display title, and the
    conversation messages serialized as JSON text.
    """

    __table__ = "chats"          # backing table name
    __primary_key__ = "id"       # UUID string key (see UUIDPrimaryKeyMixin)
    __incrementing__ = False     # key is not an auto-incrementing integer
    __timestamps__ = False       # table has no created_at/updated_at columns
    __fillable__ = ["id", "title", "messages"]  # mass-assignable columns

View file

@ -8,13 +8,11 @@ dependencies = [
"jinja2>=3.1.6",
"langchain>=0.3.26",
"langchain-core>=0.3.68",
"langchain-openai>=0.3.28",
"starlette>=0.47.1",
"uvicorn>=0.35.0",
"python-dotenv>=1.1.1",
"websockets>=15.0.1",
"sse-starlette>=2.4.1",
"langchain-openai>=0.3.28",
"langgraph>=0.5.4",
"langgraph-checkpoint-sqlite>=2.0.11",
"aiosqlite>=0.21.0",
"masonite-orm>=3.0.0",
]

1469
uv.lock generated

File diff suppressed because it is too large Load diff