File size: 448 Bytes
bc72193
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
import chainlit as cl
from dotenv import load_dotenv
from langchain_core.messages import HumanMessage
from langchain_openai import ChatOpenAI

# Pull OPENAI_API_KEY (and any other settings) from .env before the model
# is constructed — ChatOpenAI reads credentials at instantiation time.
load_dotenv()

# streaming=True so the model emits tokens incrementally as they are
# generated, letting the handler forward them to the UI one by one.
model = ChatOpenAI(temperature=0, streaming=True)

@cl.on_message
async def run_convo(message: cl.Message):
    """Handle an incoming chat message by streaming the model's reply.

    Tokens are forwarded to the UI as they arrive. The original code
    called the synchronous ``model.invoke`` inside this coroutine, which
    blocked the event loop and discarded the streaming=True setting by
    sending the whole response in one piece.
    """
    reply = cl.Message(content="")
    # astream yields chunks as the model generates them; each chunk's
    # .content is the newly produced token text.
    async for chunk in model.astream([HumanMessage(content=message.content)]):
        await reply.stream_token(chunk.content)
    # Finalize the streamed message so it is persisted in the chat.
    await reply.send()