Initial
This commit is contained in:
commit
1dec5d49ec
6 changed files with 1056 additions and 0 deletions
49
main.py
Normal file
49
main.py
Normal file
|
|
@ -0,0 +1,49 @@
|
|||
import os
|
||||
from dotenv import load_dotenv
|
||||
from starlette.applications import Starlette
|
||||
from starlette.routing import Route, WebSocketRoute, Mount
|
||||
from starlette.staticfiles import StaticFiles
|
||||
from starlette.templating import Jinja2Templates
|
||||
from starlette.websockets import WebSocket, WebSocketDisconnect
|
||||
from langchain_ollama import ChatOllama
|
||||
|
||||
# Pull environment variables (e.g. Ollama host settings) from a local .env file
# before any of the clients below are constructed.
load_dotenv()

# Initialize templates
# Jinja2 pages are served from the ./templates directory (index.html lives there).
templates = Jinja2Templates(directory="templates")

# Use ChatOpenAI or any other chat model
# reasoning=False suppresses the model's chain-of-thought output in replies.
llm = ChatOllama(model="qwen3:4b", reasoning=False)

# Conversation history seeded with the system prompt.
# NOTE(review): this list is module-global, so it is shared by every
# websocket client and grows without bound — confirm that is intended.
messages = [
    ("system", "You are a helpful assistant"),
]
|
||||
|
||||
async def homepage(request):
    """Serve the chat UI by rendering templates/index.html."""
    context = {"request": request}
    return templates.TemplateResponse("index.html", context)
|
||||
|
||||
async def websocket_endpoint(websocket: WebSocket):
    """Chat loop over a WebSocket.

    Each incoming text frame is treated as one user turn; the LLM's reply is
    streamed back to the client chunk-by-chunk as text frames. The loop runs
    until the client disconnects.

    NOTE(review): `messages` is module-global, so history is shared across
    ALL connected clients — confirm whether per-connection history is wanted.
    """
    await websocket.accept()
    try:
        while True:
            user_text = await websocket.receive_text()
            # Tag the turn with an explicit role; the original appended the
            # bare string and relied on implicit coercion.
            messages.append(("human", user_text))

            # astream() keeps the event loop free while the model generates;
            # the original sync llm.stream() blocked every other connection
            # for the duration of each completion.
            reply_parts = []
            async for chunk in llm.astream(messages):
                text = chunk.text()
                reply_parts.append(text)
                await websocket.send_text(text)

            # Record the assistant turn so the model sees its own earlier
            # replies on subsequent requests — without this the history
            # contained only user/system messages.
            messages.append(("assistant", "".join(reply_parts)))
    except WebSocketDisconnect:
        print("Client disconnected")
|
||||
|
||||
# URL table: the chat page, the streaming websocket, and static assets.
static_app = StaticFiles(directory="static")
routes = [
    Route("/", homepage),
    WebSocketRoute("/ws", websocket_endpoint),
    Mount("/static", static_app, name="static"),
]

app = Starlette(debug=True, routes=routes)
|
||||
|
||||
# Dev entry point: run the ASGI app directly, bound to all interfaces.
if __name__ == "__main__":
    import uvicorn
    uvicorn.run(app, host="0.0.0.0", port=8000)
|
||||
Loading…
Add table
Add a link
Reference in a new issue