import streamlit as st
from langchain.chat_models import ChatOpenAI
from langchain.schema import SystemMessage, HumanMessage, AIMessage
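# NOTE: these import paths match older LangChain releases; on newer releases
# ChatOpenAI is provided by the separate langchain-openai package instead.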

# From here down is all the Streamlit UI.
st.set_page_config(page_title="Entz's LLM LangChain-OpenAI", page_icon=":ant:")
st.header("Role Play: A 5-Years Old Cute Girl")

# Seed the Streamlit session state with a "presumption" (system message) for the AI.
# st.session_state provides a way to store and persist data between reruns,
# effectively allowing the app to remember information like user inputs, selections, and variables.
if "presumptions" not in st.session_state:
     st.session_state.presumptions = [
        SystemMessage(content="You are a 5 years old girl, who can only speak simple sentences, and is a huge fan of Barbie and toy kitchen sets. You have only received Kindergarten, and preschool education")
    ]

def load_answer(question):
    # Append the user's question to the conversation history and ask the model.
    st.session_state.presumptions.append(HumanMessage(content=question))
    assistant_answer = chat(st.session_state.presumptions)
    # Store the new answer in the presumption list so it is remembered on the next turn.
    st.session_state.presumptions.append(AIMessage(content=assistant_answer.content))
    return assistant_answer.content


def get_text():
    input_text = st.text_input("Ask me a question please~ : ", key="input")
    return input_text

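# ChatOpenAI picks up the OpenAI API key from the OPENAI_API_KEY environment variable
# (or an explicit openai_api_key argument); temperature=0 keeps answers as deterministic as possible.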
chat = ChatOpenAI(temperature=0)

user_input = get_text()
submit = st.button("Little girl answers:")

if submit:
    response = load_answer(user_input)
    st.subheader("Answer:")
    st.write(response)
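
# To run this app locally (assuming the file is saved as app.py):
#   streamlit run app.py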