Upload app.py
app.py
CHANGED
@@ -1,103 +1,77 @@
 import streamlit as st
 from streamlit_chat import message
-from langchain_openai import ChatOpenAI
-from langchain.chains import ConversationChain
-from langchain.chains.conversation.memory import (ConversationBufferMemory,
-                                                   ConversationSummaryMemory,
-                                                   ConversationBufferWindowMemory
-
-                                                   )
-
-
-
-
-
-if 'API_Key' not in st.session_state:
-    st.session_state['API_Key'] =''
-
-
-
-
-st.markdown("<h4 style='text-align: center;'>A cutting-edge language model</h4>", unsafe_allow_html=True)
-st.markdown("<p style='text-align: right'>By <a href='https://entzyeung.github.io/portfolio/index.html'>Lorentz Yeung</a></p>", unsafe_allow_html=True)
-
-
-st.session_state['API_Key']= st.text_input("First, to get it work, put your OpenAI API Key here please, the system will enter for you automatically.",type="password")
-st.markdown("<p style='text-align: left;'>Then Tell me how I can help:</p>", unsafe_allow_html=True)
-
-
-
-#
-# st.sidebar.text_input() will automatically update st.session_state['API_Key'] with the input value whenever the user types into the field.
-st.sidebar.title("Introduction")
-st.sidebar.markdown("""
-ChatMate is an advanced conversational AI interface, expertly crafted to demonstrate the fusion of Streamlit's user-friendly design and OpenAI's powerful GPT-3.5 model. Here are its highlights:
-
-
-
-
-<li><strong>State Management</strong>: Utilizes <code>ConversationChain</code> and <code>ConversationMemory</code> from <code>langchain</code> to preserve the context and flow, ensuring coherent and engaging interactions.</li>
-<li><strong>Python Proficiency</strong>: The app's robust backend, written in Python, reflects the data scientist’s adeptness in programming and system design.</li>
-<li><strong>Secure Interaction</strong>: Streamlit's session state management is used for secure API key handling and user input retention across sessions.</li>
-</ul>
-
-ChatMate is developed by Lorentz Yeung
-""", unsafe_allow_html=True)
-
-#st.session_state['API_Key']= st.sidebar.text_input("Put your OpenAI API Key here please, the system will enter for you automatically.",type="password")
-
-
-#if summarise_button:
-#    summarise_placeholder = st.sidebar.write("Nice chatting with you my friend ❤️")
-
-
-
-def getresponse(userInput, api_key):
-
-
-    llm = ChatOpenAI(
-        temperature=0,
-        openai_api_key=api_key,
-        model_name='gpt-3.5-turbo'
-    )
-
-    conversation = ConversationChain(
-        llm=llm,
-        verbose=True,
-        memory=ConversationSummaryMemory(llm=llm)
-    )
-
-
-
-
-
-
-
-container = st.container()
-
-
-
-with st.form(key='my_form', clear_on_submit=True):
-    user_input = st.text_area("Ask me questions please", key='input', height=100)
-    submit_button = st.form_submit_button(label='Send')
-
-
-    st.session_state['
-    model_response=getresponse(user_input,st.session_state['API_Key'])
-    st.session_state['messages'].append(model_response)
-
-
-
-
+
+from langchain.document_loaders import CSVLoader
+from langchain_openai import OpenAIEmbeddings
+from langchain.chains import RetrievalQA
+from langchain.chains import ConversationalRetrievalChain
+
+from langchain_openai import ChatOpenAI
+import os
+from langchain_community.vectorstores import Chroma
+
+import tempfile
+
+
+user_api_key = st.sidebar.text_input(
+    label="#### Your OpenAI API key 👇",
+    placeholder="Paste your OpenAI API key, sk-",
+    type="password")
+
+# uploaded_file = st.sidebar.file_uploader("upload", type="csv")
+
+persist_directory = "chroma/db"
+embeddings = OpenAIEmbeddings()
+KaggleX_courses_db = Chroma(persist_directory=persist_directory, embedding_function=embeddings)
+
+
+if KaggleX_courses_db:
+
+    KaggleX_courses_db = Chroma(persist_directory=persist_directory, embedding_function=embeddings)
+    retriever = KaggleX_courses_db.as_retriever()  # search_kwargs={"k": 4}
+
+    chain = ConversationalRetrievalChain.from_llm(llm=ChatOpenAI(temperature=0.0, model_name='gpt-3.5-turbo',
+                                                                 openai_api_key=user_api_key),
+                                                  retriever=retriever)
+
+    def conversational_chat(query):
+
+        result = chain({"question": query, "chat_history": st.session_state['history']})
+        st.session_state['history'].append((query, result["answer"]))
+
+        return result["answer"]
+
+    if 'history' not in st.session_state:
+        st.session_state['history'] = []
+
+    if 'ai_history' not in st.session_state:
+        st.session_state['ai_history'] = ["Hello! Ask me anything about KaggleX courses!"]
+
+    if 'user_history' not in st.session_state:
+        st.session_state['user_history'] = ["I would like to know more about the KaggleX courses!"]
+
+    # container for the chat history
+    response_container = st.container()
+    # container for the user's text input
+    container = st.container()
+
+    with container:
+        with st.form(key='my_form', clear_on_submit=True):
+
+            user_input = st.text_input("Query:", placeholder="Learn more about the courses in KaggleX:", key='input')
+            submit_button = st.form_submit_button(label='Ask')
+
+        if submit_button and user_input:
+            output = conversational_chat(user_input)  # if the button is clicked, submit the query to the chain, taking the chat history from session_state
+
+            st.session_state['user_history'].append(user_input)  # store the user input in the user history
+            st.session_state['ai_history'].append(output)  # store the AI answer in the AI history
+
+    # the chat interface
+    if st.session_state['ai_history']:
+        with response_container:
+            for i in range(len(st.session_state['ai_history'])):
+                message(st.session_state["user_history"][i], is_user=True, key=str(i) + '_user', avatar_style="big-smile")
+                message(st.session_state["ai_history"][i], key=str(i), avatar_style="thumbs")
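
The new app only reopens an already-persisted Chroma index at "chroma/db"; the unused CSVLoader and tempfile imports suggest the index is built from a course CSV in a separate step that is not part of this commit. A minimal sketch of what that one-off indexing step could look like is below, assuming a hypothetical kagglex_courses.csv and an OPENAI_API_KEY set in the environment; the file name, its columns, and the separate-script layout are assumptions, not something shown in the diff.

# hypothetical indexing script: builds the persisted store that app.py reads from "chroma/db"
from langchain.document_loaders import CSVLoader
from langchain_openai import OpenAIEmbeddings
from langchain_community.vectorstores import Chroma

loader = CSVLoader(file_path="kagglex_courses.csv", encoding="utf-8")  # hypothetical course list
documents = loader.load()  # one Document per CSV row

embeddings = OpenAIEmbeddings()  # reads OPENAI_API_KEY from the environment
db = Chroma.from_documents(
    documents,
    embedding=embeddings,
    persist_directory="chroma/db",  # must match persist_directory in app.py
)
db.persist()  # older Chroma releases need an explicit persist; newer ones write to disk automatically

Once that index exists on disk, launching the app with "streamlit run app.py" lets the ConversationalRetrievalChain answer questions against it.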