from dotenv import load_dotenv

load_dotenv()

import chainlit as cl
from langchain_core.messages import HumanMessage
from langchain_openai import ChatOpenAI

# We set streaming=True so that the model can stream tokens
model = ChatOpenAI(temperature=0, streaming=True)


@cl.on_message  # Chainlit calls this handler for every incoming user message
async def run_convo(message: cl.Message):
    # Wrap the user's text in a HumanMessage and call the model without blocking the event loop
    response = await model.ainvoke([HumanMessage(content=message.content)])
    await cl.Message(content=response.content).send()
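The handler above waits for the full response before sending it, even though streaming=True is set on the model. Below is a minimal sketch of how tokens could instead be streamed into the Chainlit UI as they arrive, using LangChain's astream() and Chainlit's msg.stream_token(); it reuses model and HumanMessage from the snippet above and would replace run_convo as the on_message handler. The handler name is illustrative, not part of the original snippet.

@cl.on_message
async def run_convo_streaming(message: cl.Message):
    # Start with an empty Chainlit message and append tokens as they arrive
    msg = cl.Message(content="")
    async for chunk in model.astream([HumanMessage(content=message.content)]):
        # Each chunk is an AIMessageChunk; push its text to the UI immediately
        await msg.stream_token(chunk.content)
    # Finalize the streamed message once the model is done
    await msg.send()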