# hostserver3/main.py
from fastapi import FastAPI
from pydantic import BaseModel
from fastapi.middleware.cors import CORSMiddleware
from fastapi.responses import StreamingResponse
from hugchat import hugchat
from hugchat.login import Login
import asyncio
import os
from dotenv import load_dotenv
# Load environment variables from .env file
load_dotenv()
# Read credentials from environment variables
EMAIL = os.getenv("EMAIL")
PASSWD = os.getenv("PASSWD")
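# Optional sanity check (a minimal sketch): fail fast with a clear message if the
# credentials were not provided, instead of letting Login() fail later with None values.
if not EMAIL or not PASSWD:
    raise RuntimeError("EMAIL and PASSWD must be set in the environment or a .env file")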
cookies = sign.login(cookie_dir_path="cookies", save_cookies=True)
# Cookie storage
cookie_path_dir = "./cookies/"
os.makedirs(cookie_path_dir, exist_ok=True)
# HugChat login
sign = Login(EMAIL, PASSWD)
cookies = sign.login(cookie_dir_path=cookie_path_dir, save_cookies=True)
# Create chatbot instance
chatbot = hugchat.ChatBot(cookies=cookies.get_dict())
# Optional: Use assistant ID
ASSISTANT_ID = "66017fca58d60bd7d5c5c26c" # Replace if needed
chatbot.new_conversation(assistant=ASSISTANT_ID, switch_to=True)
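# Note: this single conversation is created once at startup and shared by every
# request; isolating users would require creating a new conversation per request.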
# FastAPI setup
app = FastAPI()
# Enable CORS
app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)
# Request model
class Question(BaseModel):
    question: str
# Token stream function
async def generate_response_stream(prompt: str):
    # Relay tokens from HugChat's streaming chat response as they arrive
    for chunk in chatbot.chat(prompt, stream=True):
        token = chunk.get("token", "")
        if token:
            yield token
            await asyncio.sleep(0.02)  # small pause so the event loop can flush each token
# Endpoint
@app.post("/ask")
async def ask(question: Question):
return StreamingResponse(
generate_response_stream(question.question),
media_type="text/plain"
)
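# Usage sketch (assumes uvicorn and requests are installed; host, port, and the
# example question are illustrative):
#   uvicorn main:app --host 0.0.0.0 --port 7860
#
#   import requests
#   with requests.post("http://localhost:7860/ask",
#                      json={"question": "Hello"}, stream=True) as r:
#       for piece in r.iter_content(chunk_size=None, decode_unicode=True):
#           print(piece, end="", flush=True)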