import streamlit as st
import pinecone
from langchain.embeddings.openai import OpenAIEmbeddings
from langchain.vectorstores.pinecone import Pinecone

from makechain import get_chain

# Page title and sidebar description
st.title("Ask the Black@Stanford Exhibit")
st.sidebar.header(
    "You can ask questions of interviews with Black Stanford students and faculty "
    "from the University Archives"
)
st.sidebar.info(
    '''This is a web application that allows you to interact with
the Stanford Archives.
Enter a **Question** in the **text box** and **press enter** to receive
a **response** from our ChatBot.
'''
)
# Connect to the existing Pinecone index; credentials and the index name are
# read from Streamlit secrets rather than hard-coded in the repository.
pinecone.init(
    api_key=st.secrets["PINECONE_API_KEY"],
    environment=st.secrets["PINECONE_ENVIRONMENT"],
)
index = pinecone.Index(index_name=st.secrets["PINECONE_INDEX_NAME"])

# Embed user queries with OpenAI embeddings and wrap the Pinecone index in
# LangChain's vector store, reading document text from the "text" metadata field.
embed = OpenAIEmbeddings(openai_api_key=st.secrets["OPENAI_API_KEY"])
text_field = "text"
vector_store = Pinecone(index, embed.embed_query, text_field)

# Build the question-answering chain on top of the vector store.
qa_chain = get_chain(vector_store)
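# NOTE: get_chain() lives in the local makechain module (not shown here). Judging
# from how the response is used below (response['answer'] and response['sources']),
# it is assumed to return a LangChain chain along the lines of
# RetrievalQAWithSourcesChain, which takes {'question': ...} and returns an answer
# plus the sources it drew on; treat the exact chain type as an assumption.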
def main():
    user_query = st.text_input("Enter your question here")
    # Only query the chain when the user actually entered a question
    # (skip empty input and the ":q" sentinel).
    if user_query and user_query != ":q":
        # Collapse the question onto a single line before sending it to the chain.
        query = user_query.strip().replace('\n', ' ')
        response = qa_chain({'question': query})
        st.write(response['answer'])
        st.write("Sources: ")
        st.write(response['sources'])


try:
    main()
except Exception as e:
    st.error(f"An error occurred while running the application: {e}")
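# To run locally: `streamlit run` this file, with the keys read via st.secrets above
# provided in .streamlit/secrets.toml (or in the Streamlit Community Cloud secrets
# manager when deployed).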