Update main.py
main.py CHANGED
@@ -3,7 +3,7 @@ import os
 import streamlit as st
 import anthropic
 
-
+from langchain_community.embeddings import HuggingFaceInferenceAPIEmbeddings
 from langchain_community.embeddings import HuggingFaceBgeEmbeddings
 from langchain_community.vectorstores import SupabaseVectorStore
 from langchain_community.llms import HuggingFaceEndpoint
@@ -26,21 +26,12 @@ username = st.secrets.username
 supabase: Client = create_client(supabase_url, supabase_key)
 logger = get_logger(__name__)
 
-
-
-
-embeddings = HuggingFaceBgeEmbeddings(
-    model_name=model_name,
-    model_kwargs=model_kwargs,
-    encode_kwargs=encode_kwargs
+embeddings = HuggingFaceInferenceAPIEmbeddings(
+    api_key=hf_api_key,
+    model_name="BAAI/bge-large-en-v1.5",
+    api_url="https://router.huggingface.co/hf-inference/pipeline/feature-extraction/",
 )
 
-# embeddings = HuggingFaceInferenceAPIEmbeddings(
-#     api_key=hf_api_key,
-#     model_name="BAAI/bge-large-en-v1.5",
-#     api_url="https://router.huggingface.co/hf-inference/pipeline/feature-extraction/",
-# )
-
 if 'chat_history' not in st.session_state:
     st.session_state['chat_history'] = []
 
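The change swaps the locally loaded HuggingFaceBgeEmbeddings for HuggingFaceInferenceAPIEmbeddings, so BAAI/bge-large-en-v1.5 vectors come from Hugging Face's hosted feature-extraction endpoint instead of being computed inside the Space. Below is a minimal sketch of how the new embeddings object could plug into the SupabaseVectorStore that main.py already imports; the secrets, table_name, query_name, and sample query are assumptions, not values taken from this diff.

# Minimal sketch, not the Space's actual code. Assumes supabase_url,
# supabase_key, and hf_api_key are already loaded from st.secrets earlier
# in main.py; table_name and query_name are hypothetical placeholders.
from langchain_community.embeddings import HuggingFaceInferenceAPIEmbeddings
from langchain_community.vectorstores import SupabaseVectorStore
from supabase.client import Client, create_client

supabase: Client = create_client(supabase_url, supabase_key)

embeddings = HuggingFaceInferenceAPIEmbeddings(
    api_key=hf_api_key,
    model_name="BAAI/bge-large-en-v1.5",
    api_url="https://router.huggingface.co/hf-inference/pipeline/feature-extraction/",
)

# Quick sanity check of the hosted endpoint: bge-large-en-v1.5 returns a
# 1024-dimensional vector per query.
_ = embeddings.embed_query("ping")

# Wire the remote embeddings into the Supabase vector store for retrieval.
vector_store = SupabaseVectorStore(
    client=supabase,
    embedding=embeddings,
    table_name="documents",        # assumed table name
    query_name="match_documents",  # assumed Postgres function name
)
docs = vector_store.similarity_search("What does this Space do?", k=4)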