# AILanguageCompanion / app_chainlit-2nd.py
from dotenv import load_dotenv

# Load environment variables (e.g. OPENAI_API_KEY) before the model is created.
load_dotenv()

import chainlit as cl
from langchain_core.messages import HumanMessage
from langchain_openai import ChatOpenAI

# streaming=True lets the model emit tokens incrementally.
model = ChatOpenAI(temperature=0, streaming=True)
@cl.on_message
async def run_convo(message: cl.Message):
    # Forward the user's message to the model and send the full reply back to the UI.
    # ainvoke is used so the async handler does not block the event loop.
    response = await model.ainvoke([HumanMessage(content=message.content)])
    await cl.Message(content=response.content).send()
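

# The handler above waits for the full completion even though the model was
# created with streaming=True. Below is a minimal sketch (not part of the
# original file) of how tokens could instead be streamed to the UI as they
# arrive, using LangChain's astream() and Chainlit's Message.stream_token().
# The name run_convo_streaming is illustrative; to use it, move the
# @cl.on_message decorator here instead of on run_convo, since Chainlit keeps
# only one on_message handler.
#
# @cl.on_message
async def run_convo_streaming(message: cl.Message):
    # Open an empty Chainlit message and append tokens as they arrive.
    msg = cl.Message(content="")
    async for chunk in model.astream([HumanMessage(content=message.content)]):
        # Each chunk carries a fragment of the reply text.
        await msg.stream_token(chunk.content)
    # Finalize the streamed message in the UI.
    await msg.send()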