update from A7
Files changed:
- .streamlit/main.css +6 -0
- requirements.txt +21 -2
- semsearch.py +24 -69
- semsearchDbgUI.py +61 -18

.streamlit/main.css
ADDED
@@ -0,0 +1,6 @@
+/* style CSS for main streamlit page. */
+.main {
+    max-width: 100%;
+    margin: 0 auto;
+    padding-top: 50px;
+}
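
Both semsearch.py and semsearchDbgUI.py below apply this stylesheet by reading the file and injecting it into the page as an inline <style> block via st.markdown. Shown in isolation, the pattern added by this commit is:

    import streamlit as st

    def load_css(file_name):
        # Read the stylesheet and inject it into the rendered page as a <style> block.
        with open(file_name) as f:
            st.markdown(f"<style>{f.read()}</style>", unsafe_allow_html=True)

    load_css("./.streamlit/main.css")
    st.write("Styled page content goes here.")

Streamlit does not expose a dedicated stylesheet API, so unsafe_allow_html=True is needed for the raw <style> tag to be rendered.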

requirements.txt
CHANGED
@@ -1,4 +1,23 @@
 streamlit
-numpy
-pandas
+#numpy
+#pandas
+
+weaviate-client==4.*
+sentence-transformers
+langchain
+langchain_community
+lxml
+beautifulsoup4
+
+transformers==4.34.1
+fastapi==0.103.2
+uvicorn==0.23.2
+nltk==3.8.1
+torch==2.0.1
+sentencepiece==0.1.99
+sentence-transformers==2.2.2
+optimum==1.13.2
+onnxruntime==1.16.1
+onnx==1.14.1
+
 
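
The commit swaps the original numpy/pandas requirements for the RAG stack: the Weaviate client, LangChain, HTML parsing, and a pinned transformers/torch/onnx toolchain. Note that sentence-transformers appears twice, once unpinned and once as sentence-transformers==2.2.2; pip merges the two specifiers, so the pinned version effectively wins. A quick, hypothetical smoke test for the environment, assuming nothing beyond the packages listed above:

    # Hypothetical smoke test: import the core packages and print their versions
    # so a broken install surfaces before the Streamlit app is launched.
    import streamlit, weaviate, transformers, torch, sentence_transformers, nltk, onnxruntime

    for mod in (streamlit, weaviate, transformers, torch, sentence_transformers, nltk, onnxruntime):
        print(f"{mod.__name__}: {getattr(mod, '__version__', 'unknown')}")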

semsearch.py
CHANGED
@@ -17,8 +17,7 @@ import logging
 import llama_cpp
 from llama_cpp import Llama
 
-import
-from IPython.display import display, clear_output
+import streamlit as st
 
 
 weaviate_logger = logging.getLogger("httpx")
@@ -27,6 +26,16 @@ weaviate_logger.setLevel(logging.WARNING)
 logger = logging.getLogger(__name__)
 logging.basicConfig(level=logging.INFO)
 
+# Function to load the CSS file
+def load_css(file_name):
+    with open(file_name) as f:
+        st.markdown(f'<style>{f.read()}</style>', unsafe_allow_html=True)
+
+# Load the custom CSS
+load_css("./.streamlit/main.css")
+
+st.markdown("<h1 style='text-align: center; color: #666666;'>Vector Database RAG Proof of Concept</h1>", unsafe_allow_html=True)
+st.markdown("<h6 style='text-align: center; color: #666666;'>V1</h6>", unsafe_allow_html=True)
 
 
 ######################################################################
@@ -43,58 +52,6 @@ webpageChunks = []
 webpageTitles = []
 webpageChunksDocNames = []
 
-#####################################################################
-# Create UI widgets.
-output_widget = widgets.Output()
-with output_widget:
-    print("### Create widgets entered.")
-
-systemTextArea = widgets.Textarea(
-    value='',
-    placeholder='Enter System Prompt.',
-    description='Sys Prompt: ',
-    disabled=False,
-    layout=widgets.Layout(width='300px', height='80px')
-)
-
-userTextArea = widgets.Textarea(
-    value='',
-    placeholder='Enter User Prompt.',
-    description='User Prompt: ',
-    disabled=False,
-    layout=widgets.Layout(width='435px', height='110px')
-)
-
-ragPromptTextArea = widgets.Textarea(
-    value='',
-    placeholder='App generated prompt with RAG information.',
-    description='RAG Prompt: ',
-    disabled=False,
-    layout=widgets.Layout(width='580px', height='180px')
-)
-
-responseTextArea = widgets.Textarea(
-    value='',
-    placeholder='LLM generated response.',
-    description='LLM Resp: ',
-    disabled=False,
-    layout=widgets.Layout(width='780px', height='200px')
-)
-
-selectRag = widgets.Checkbox(
-    value=False,
-    description='Use RAG',
-    disabled=False
-)
-
-submitButton = widgets.Button(
-    description='Run Model.',
-    disabled=False,
-    button_style='', # 'success', 'info', 'warning', 'danger' or ''
-    tooltip='Click',
-    icon='check' # (FontAwesome names without the `fa-` prefix)
-)
-
 
 ######################################################
 # Connect to the Weaviate vector database.
@@ -377,13 +334,9 @@ def getRagData(promptText):
     #collection = client.collections.get("Chunks")
     return ragData
 
+
 # Display UI
-
-display(userTextArea)
-display(ragPromptTextArea)
-display(responseTextArea)
-display(selectRag)
-display(submitButton)
+
 
 def runLLM(prompt):
     max_tokens = 1000
@@ -419,17 +372,19 @@ def setPrompt(pprompt,ragFlag):
 
 
 def on_submitButton_clicked(b):
-
-
-
-
-
-
-
-
+    logger.debug("\n### on_submitButton_clicked")
+    st.session_state.sysTAtext = st.session_state.sysTA
+    logger.info(f"sysTAtext: {st.session_state.sysTAtext}")
+
+    st.session_state.userpTAtext = setPrompt("","")
+    st.session_state.userpTA = st.session_state.userpTAtext
+    logger.info(f"userpTAtext: {st.session_state.userpTAtext}")
+
+    st.session_state.rspTAtext = runLLM(st.session_state.userpTAtext)
+    st.session_state.rspTA = st.session_state.rspTAtext
+    logger.info(f"rspTAtext: {st.session_state.rspTAtext}")
 
 submitButton.on_click(on_submitButton_clicked)
-display(output_widget)
 
 
 #logger.info("#### Closing client db connection.")
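
The diff above only shows fragments of getRagData() (the commented-out "Chunks" collection lookup) and runLLM() (its max_tokens default), so the following is a hypothetical sketch rather than the file's actual implementation: a weaviate-client 4.x nearest-chunk lookup feeding a llama-cpp completion. The embedding model name, the "chunk" property, and the GGUF model path are assumptions; only the "Chunks" collection name and the Llama import come from the diff.

    import weaviate
    from sentence_transformers import SentenceTransformer
    from llama_cpp import Llama

    embedder = SentenceTransformer("all-MiniLM-L6-v2")   # assumed embedding model
    client = weaviate.connect_to_local()                 # assumes a local Weaviate instance

    def get_rag_data(prompt_text, limit=3):
        # Embed the prompt and fetch the closest chunks from the "Chunks" collection.
        chunks = client.collections.get("Chunks")
        hits = chunks.query.near_vector(near_vector=embedder.encode(prompt_text).tolist(), limit=limit)
        return "\n".join(obj.properties.get("chunk", "") for obj in hits.objects)  # "chunk" property is assumed

    llm = Llama(model_path="model.gguf", n_ctx=4096)     # hypothetical GGUF model path

    def run_llm(prompt, max_tokens=1000):
        # Plain completion call; the real runLLM() may format the prompt differently.
        out = llm(prompt, max_tokens=max_tokens)
        return out["choices"][0]["text"]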

semsearchDbgUI.py
CHANGED
@@ -1,6 +1,13 @@
 import streamlit as st
 import logging
 
+# Function to load the CSS file
+def load_css(file_name):
+    with open(file_name) as f:
+        st.markdown(f'<style>{f.read()}</style>', unsafe_allow_html=True)
+
+# Load the custom CSS
+load_css("./.streamlit/main.css")
 
 st.markdown("<h1 style='text-align: center; color: #666666;'>Vector Database RAG Proof of Concept</h1>", unsafe_allow_html=True)
 st.markdown("<h6 style='text-align: center; color: #666666;'>V1</h6>", unsafe_allow_html=True)
@@ -8,21 +15,30 @@ st.markdown("<h6 style='text-align: center; color: #666666;'>V1</h6>", unsafe_allow_html=True)
 logger = logging.getLogger(__name__)
 logging.basicConfig(level=logging.DEBUG)
 
-def runLLM(prompt):
-    log.debug("### runLLM")
-    result = ""
-    return(result)
 
 def setPrompt(pprompt,ragFlag):
-
-    userPrompt = ""
+    logger.debug("### setPrompt")
+    userPrompt = "USER PROMPT"
     return userPrompt
 
+def runLLM(prompt):
+    logger.debug("### runLLM")
+    result = "LLM RESULT"
+    return(result)
+
 
-def on_submitButton_clicked(
-
-
-
+def on_submitButton_clicked():
+    logger.debug("\n### on_submitButton_clicked")
+    st.session_state.sysTAtext = st.session_state.sysTA
+    logger.info(f"sysTAtext: {st.session_state.sysTAtext}")
+
+    st.session_state.userpTAtext = setPrompt("","")
+    st.session_state.userpTA = st.session_state.userpTAtext
+    logger.info(f"userpTAtext: {st.session_state.userpTAtext}")
+
+    st.session_state.rspTAtext = runLLM(st.session_state.userpTAtext)
+    st.session_state.rspTA = st.session_state.rspTAtext
+    logger.info(f"rspTAtext: {st.session_state.rspTAtext}")
 
 
 ######################################################################
@@ -30,13 +46,40 @@ def on_submitButton_clicked(b):
 #
 logger.info("#### MAINLINE ENTERED.")
 
-
-
-
-
-
-
+col1, col2 = st.columns(2)
+
+with col1:
+    if "sysTA" not in st.session_state:
+        st.session_state.sysTA = st.text_area(label="sysTA",value="fdsaf fsdafdsa")
+    elif "sysTAtext" in st.session_state:
+        st.session_state.sysTA = st.text_area(label="sysTA",value=st.session_state.sysTAtext)
+    else:
+        st.session_state.sysTA = st.text_area(label="sysTA",value=st.session_state.sysTA)
+
+    if "userpTA" not in st.session_state:
+        userTextArea = st.text_area(label="userpTA",value="fdsaf fsdafdsa")
+    elif "userpTAtext" in st.session_state:
+        st.session_state.userpTA = st.text_area(label="userpTA",value=st.session_state.userpTAtext)
+    else:
+        st.session_state.userpTA = st.text_area(label="userpTA",value=st.session_state.userpTA)
+
+with col2:
+    if "ragpTA" not in st.session_state:
+        ragPromptTextArea = st.text_area(label="ragpTA",value="fdsaf fsdafdsa")
+    elif "ragpTAtext" in st.session_state:
+        st.session_state.ragpTA = st.text_area(label="ragpTA",value=st.session_state.ragpTAtext)
+    else:
+        st.session_state.ragTA = st.text_area(label="ragTA",value=st.session_state.ragTA)
+
+    if "rspTA" not in st.session_state:
+        responseTextArea = st.text_area(label="rspTA",value="fdsaf fsdafdsa")
+    elif "rspTAtext" in st.session_state:
+        st.session_state.rspTA = st.text_area(label="rspTA",value=st.session_state.rspTAtext)
+    else:
+        st.session_state.rspTA = st.text_area(label="rspTA",value=st.session_state.rspTA)
+
+with st.sidebar:
+    st.selectRag = st.checkbox("Enable Query With RAG",value=False,key="selectRag",help=None,on_change=None,args=None,kwargs=None,disabled=False,label_visibility="visible")
+    st.submitButton = st.button("Run LLM Query",key=None,help=None,on_click=on_submitButton_clicked,args=None,kwargs=None,type="secondary",disabled=False,use_container_width=False)
 
-#logger.info("\n### Before calling submitButton.on_click().")
-#submitButton.on_click(on_submitButton_clicked)
 
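
The debug UI keeps each text area's content in two session-state slots (a widget key such as sysTA plus a shadow sysTAtext written by the callback) and reconciles them with an if/elif/else at render time. A more compact variant of the same idea relies on the documented behavior that on_click callbacks run before the script reruns, so the callback can write straight to the widget keys; build_prompt and run_llm below are hypothetical stand-ins for setPrompt and runLLM, not the commit's code:

    import streamlit as st

    def build_prompt(system_text):            # stand-in for setPrompt()
        return f"SYSTEM: {system_text}\nUSER: ..."

    def run_llm(prompt):                      # stand-in for runLLM()
        return f"LLM RESULT for: {prompt[:40]}"

    def on_submit():
        # Callbacks run before the next rerun, so widget-keyed entries can be
        # updated here and the text areas will render the new values.
        st.session_state.userpTA = build_prompt(st.session_state.sysTA)
        st.session_state.rspTA = run_llm(st.session_state.userpTA)

    col1, col2 = st.columns(2)
    with col1:
        st.text_area("System prompt", key="sysTA")
        st.text_area("User prompt", key="userpTA")
    with col2:
        st.text_area("RAG prompt", key="ragpTA")
        st.text_area("LLM response", key="rspTA")

    with st.sidebar:
        st.checkbox("Enable Query With RAG", key="selectRag")
        st.button("Run LLM Query", on_click=on_submit)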