Update app.py
app.py CHANGED
@@ -15,13 +15,13 @@ from langchain.smith import RunEvalConfig, run_on_dataset
 # Load API Keys From the .env File & Load the OpenAI, Pinecone, and LangSmith Client
 #------------------------------------------------------------------------

-#
-
-#
-
+# Fetch the OpenAI API key from Streamlit secrets
+OPENAI_API_KEY = st.secrets["OPENAI_API_KEY"]
+# Retrieve the OpenAI API Key from secrets
+openai.api_key = st.secrets["OPENAI_API_KEY"]

-os.environ["OPENAI_API_KEY"] = st.secrets["OPENAI_API_KEY"]
-openai.api_key = os.getenv("OPENAI_API_KEY")
+# os.environ["OPENAI_API_KEY"] = st.secrets["OPENAI_API_KEY"]
+# openai.api_key = os.getenv("OPENAI_API_KEY")

 # # Fetch Pinecone API key and environment from Streamlit secrets
 PINECONE_API_KEY = st.secrets["PINECONE_API_KEY"]
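Not part of the commit, but for context: the new lines above assume both keys are available through Streamlit's secrets store (the Space's Settings > Secrets, or a local .streamlit/secrets.toml with OPENAI_API_KEY and PINECONE_API_KEY entries). A minimal sketch of a defensive lookup that falls back to environment variables; the get_secret helper is hypothetical and not part of app.py:

import os
import streamlit as st

def get_secret(name: str) -> str:
    # st.secrets behaves like a read-only mapping, so membership tests work;
    # fall back to the process environment for local development.
    if name in st.secrets:
        return st.secrets[name]
    value = os.getenv(name)
    if value is None:
        raise KeyError(f"Missing credential: {name}")
    return value

OPENAI_API_KEY = get_secret("OPENAI_API_KEY")
PINECONE_API_KEY = get_secret("PINECONE_API_KEY")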
@@ -48,8 +48,8 @@ index_name = 'mimtssinkqa'

 # Initialize the OpenAI embeddings object
 from langchain_openai import OpenAIEmbeddings
-
-embeddings = OpenAIEmbeddings()
+embeddings = OpenAIEmbeddings(openai_api_key=OPENAI_API_KEY)
+# embeddings = OpenAIEmbeddings()


 # LOAD VECTOR STORE FROM EXISTING INDEX
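The hunk above passes the key to OpenAIEmbeddings explicitly instead of relying on an exported OPENAI_API_KEY environment variable. A quick sanity check of that object, again a sketch rather than code from app.py (the sample query is made up):

from langchain_openai import OpenAIEmbeddings

embeddings = OpenAIEmbeddings(openai_api_key=OPENAI_API_KEY)

# The default OpenAI embedding model returns 1536-dimensional vectors; embedding
# one short string is a cheap way to confirm the key works before querying Pinecone.
vector = embeddings.embed_query("What is a multi-tiered system of supports?")
print(len(vector))  # expected: 1536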
@@ -63,8 +63,8 @@ def ask_with_memory(vector_store, query, chat_history=[]):

 from langchain.prompts import ChatPromptTemplate, SystemMessagePromptTemplate, HumanMessagePromptTemplate

-
-llm = ChatOpenAI(model_name='gpt-3.5-turbo', temperature=0.5)
+llm = ChatOpenAI(model_name='gpt-3.5-turbo', temperature=0.5, openai_api_key=OPENAI_API_KEY)
+# llm = ChatOpenAI(model_name='gpt-3.5-turbo', temperature=0.5)

 retriever = vector_store.as_retriever(search_type='similarity', search_kwargs={'k': 3})

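The hunk above appears to sit inside ask_with_memory (its signature is shown in the hunk header), so the explicit openai_api_key argument keeps the chat model working even when no environment variable is exported. The rest of the function is not shown in this diff; a plausible completion using LangChain's ConversationalRetrievalChain is sketched below, where the chain construction and return shape are assumptions rather than the author's code:

from langchain.chains import ConversationalRetrievalChain
from langchain_openai import ChatOpenAI

def ask_with_memory(vector_store, query, chat_history=[]):
    llm = ChatOpenAI(model_name='gpt-3.5-turbo', temperature=0.5,
                     openai_api_key=OPENAI_API_KEY)
    retriever = vector_store.as_retriever(search_type='similarity',
                                          search_kwargs={'k': 3})

    # Condense the follow-up question against the prior turns, retrieve the
    # top-3 chunks from the vector store, and answer from those chunks.
    chain = ConversationalRetrievalChain.from_llm(llm=llm, retriever=retriever)
    result = chain({'question': query, 'chat_history': chat_history})

    chat_history.append((query, result['answer']))
    return result, chat_history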