SelfQueryRetriever pass #1
app.py
CHANGED
@@ -1,28 +1,55 @@
 # https://towardsai.net/p/machine-learning/deploying-a-langchain-large-language-model-llm-with-streamlit-pinecone?amp=1
 
 """Python file to serve as the frontend"""
+import os
+import pinecone
 import streamlit as st
 from streamlit_chat import message
-
-import pinecone
-import os
+from langchain.llms import OpenAI
 from langchain.embeddings.openai import OpenAIEmbeddings
 from langchain.vectorstores import Pinecone
-
 from langchain.chains import ConversationChain
-from langchain.
-
+from langchain.retrievers.self_query.base import SelfQueryRetriever
+from langchain.chains.query_constructor.base import AttributeInfo
+
+metadata_field_info=[
+    AttributeInfo(
+        name="author",
+        description="The author of the excerpt",
+        type="string or list[string]",
+    ),
+    AttributeInfo(
+        name="chapter_number",
+        description="The chapter number of excerpt",
+        type="integer",
+    ),
+    AttributeInfo(
+        name="chapter_name",
+        description="The chapter name of the excerpt",
+        type="string",
+    ),
+]
+
+document_content_description = "Excerpt's from Reid Hoffman's book Impromptu"
 embeddings = OpenAIEmbeddings()
 
 pinecone.init(
     api_key=str(os.environ['PINECONE_API_KEY']),
     environment=str(os.environ['PINECONE_ENV']))
-
 index_name = str(os.environ['PINECONE_INDEX_NAME'])
 
 def load_chain():
-
-
+    docsearch = Pinecone.from_existing_index(index_name, embeddings)
+    retriever = SelfQueryRetriever.from_llm(
+        llm,
+        vectorstore,
+        document_content_description,
+        metadata_field_info,
+        verbose=True)
+    return retriever
+
+    # docsearch = Pinecone.from_existing_index(index_name, embeddings)
+    # return docsearch
 
 # def load_chain():
 #     """Logic for loading the chain you want to use should go here."""

@@ -51,7 +78,7 @@ def get_text():
 user_input = get_text()
 
 if user_input:
-    docs = chain.
+    docs = chain.get_relevant_documents(user_input)
     output = docs[0].page_content
 
     st.session_state.past.append(user_input)
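Note on the new load_chain(): as committed, SelfQueryRetriever.from_llm is passed llm and vectorstore, neither of which is defined anywhere in the file, so building the retriever would raise a NameError. Below is a minimal sketch of one way the function could be wired up; the OpenAI(temperature=0) model and the reuse of the docsearch Pinecone index as the vector store are assumptions, not part of this commit, and the snippet relies on the module-level names (index_name, embeddings, metadata_field_info, document_content_description) defined earlier in app.py.

# Sketch only; the llm choice and vector-store wiring are assumptions, not the committed code.
from langchain.llms import OpenAI
from langchain.vectorstores import Pinecone
from langchain.retrievers.self_query.base import SelfQueryRetriever

def load_chain():
    # Connect to the existing Pinecone index and build a self-query retriever over it.
    docsearch = Pinecone.from_existing_index(index_name, embeddings)
    llm = OpenAI(temperature=0)  # assumed model; the commit leaves `llm` undefined
    retriever = SelfQueryRetriever.from_llm(
        llm,
        docsearch,  # assumed: the index loaded above, in place of the undefined `vectorstore`
        document_content_description,
        metadata_field_info,
        verbose=True,
    )
    return retriever

# Usage then matches the second hunk (assuming app.py assigns chain = load_chain()):
chain = load_chain()
docs = chain.get_relevant_documents(user_input)

Self-query retrieval typically also needs the lark package at query-construction time, so it may have to be added to the Space's requirements if it is not there already.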