JustusI committed
Commit 9f967c8 · verified · 1 parent: 015bb98

Update app.py

Files changed (1):
  1. app.py  +10 -108
app.py CHANGED
@@ -27,43 +27,26 @@ def load_vector_db(zip_file_path, extract_path):
     st.success("Vector store loaded")
     return vectordb
 
-# # Function to augment prompt
-# def augment_prompt(query, vectordb):
-#     results = vectordb.similarity_search(query, k=10)
-#     source_knowledge = "\n".join([x.page_content for x in results])
-#     augmented_prompt = f"""
-#     You are an AI assistant. Use the context provided below to answer the question as comprehensively as possible.
-#     If the answer is not contained within the context, respond politely that you cannot provide that information.
-
-#     Context:
-#     {source_knowledge}
-
-#     Question: {query}
-#     """
-#     return augmented_prompt
-
 # Function to augment prompt
-def augment_prompt(query, vectordb, search_results):
-    results = vectordb.similarity_search(query, k=5)
+def augment_prompt(query, vectordb):
+    results = vectordb.similarity_search(query, k=10)
     source_knowledge = "\n".join([x.page_content for x in results])
     augmented_prompt = f"""
     You are an AI assistant. Use the context provided below to answer the question as comprehensively as possible.
-    If the answer is not contained within the context, respond with "I don't know".
+    If the answer is not contained within the context, respond politely that you cannot provide that information.
 
     Context:
     {source_knowledge}
 
-    Additional Web Search Results:
-    {search_results}
-
     Question: {query}
     """
     return augmented_prompt
 
+
 # Function to handle chat with OpenAI
-def chat_with_openai(query, vectordb, openai_api_key, search_results):
-    chat = ChatOpenAI(model_name="gpt-3.5-turbo", openai_api_key=openai_api_key, timeout=30)  # Increased timeout
-    augmented_query = augment_prompt(query, vectordb, search_results)
+def chat_with_openai(query, vectordb, openai_api_key):
+    chat = ChatOpenAI(model_name="gpt-3.5-turbo", openai_api_key=openai_api_key)
+    augmented_query = augment_prompt(query, vectordb)
     prompt = HumanMessage(content=augmented_query)
     messages = [
         SystemMessage(content="You are a helpful assistant."),
@@ -72,37 +55,6 @@ def chat_with_openai(query, vectordb, openai_api_key, search_results):
     res = chat(messages)
     return res.content
 
-# Function to perform web search
-def perform_web_search(query):
-    headers = {
-        "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/58.0.3029.110 Safari/537.3"}
-    search_results = ""
-
-    # Glassdoor search
-    glassdoor_url = f"https://www.glassdoor.com/Search/results.htm?keyword={query}"
-    response = requests.get(glassdoor_url, headers=headers)
-    if response.status_code == 200:
-        soup = BeautifulSoup(response.text, 'html.parser')
-        glassdoor_results = soup.find_all('div', {'class': 'jobContainer'})
-        for result in glassdoor_results[:5]:  # limiting to first 3 results
-            title = result.find('a', {'class': 'jobInfoItem jobTitle'}).text.strip() if result.find('a', {'class': 'jobInfoItem jobTitle'}) else 'N/A'
-            company = result.find('div', {'class': 'jobInfoItem jobEmpolyerName'}).text.strip() if result.find('div', {'class': 'jobInfoItem jobEmpolyerName'}) else 'N/A'
-            location = result.find('span', {'class': 'subtle loc'}).text.strip() if result.find('span', {'class': 'subtle loc'}) else 'N/A'
-            search_results += f"Glassdoor Result: {title} at {company}, {location}\n"
-
-    # Indeed search
-    indeed_url = f"https://www.indeed.com/jobs?q={query}&limit=10"
-    response = requests.get(indeed_url, headers=headers)
-    if response.status_code == 200:
-        soup = BeautifulSoup(response.text, 'html.parser')
-        indeed_results = soup.find_all('div', {'class': 'jobsearch-SerpJobCard'})
-        for result in indeed_results[:5]:  # limiting to first 3 results
-            title = result.find('h2', {'class': 'title'}).text.strip() if result.find('h2', {'class': 'title'}) else 'N/A'
-            company = result.find('span', {'class': 'company'}).text.strip() if result.find('span', {'class': 'company'}) else 'N/A'
-            location = result.find('span', {'class': 'location'}).text.strip() if result.find('span', {'class': 'location'}) else 'N/A'
-            search_results += f"Indeed Result: {title} at {company}, {location}\n"
-
-    return search_results
 
 # Streamlit UI
 st.title("Data Roles Company Finder Chatbot")
@@ -122,69 +74,19 @@ for message in st.session_state.messages:
     with st.chat_message(message["role"]):
         st.markdown(message["content"])
 
+
 # User input
 if prompt := st.chat_input("Enter your query"):
     st.session_state.messages.append({"role": "user", "content": prompt})
     with st.chat_message("user"):
         st.markdown(prompt)
 
-    # Perform web search
-    search_results = perform_web_search(prompt)
-
-    # Chat with OpenAI
-    openai_api_key = st.secrets["OPENAI_API_KEY"]
-    response = chat_with_openai(prompt, vectordb, openai_api_key, search_results)
-
-    # Display assistant response
     with st.chat_message("assistant"):
+        openai_api_key = st.secrets["OPENAI_API_KEY"]
+        response = chat_with_openai(prompt, vectordb, openai_api_key)
         st.markdown(response)
 
     st.session_state.messages.append({"role": "assistant", "content": response})
-
-# # Function to handle chat with OpenAI
-# def chat_with_openai(query, vectordb, openai_api_key):
-#     chat = ChatOpenAI(model_name="gpt-3.5-turbo", openai_api_key=openai_api_key)
-#     augmented_query = augment_prompt(query, vectordb)
-#     prompt = HumanMessage(content=augmented_query)
-#     messages = [
-#         SystemMessage(content="You are a helpful assistant."),
-#         prompt
-#     ]
-#     res = chat(messages)
-#     return res.content
-
-
-# # Streamlit UI
-# st.title("Data Roles Company Finder Chatbot")
-# st.write("This app helps users find companies hiring for data roles, providing information such as job title, salary estimate, job description, company rating, and more.")
-
-# # Load vector database
-# zip_file_path = "chroma_db_compressed_.zip"
-# extract_path = "./chroma_db_extracted"
-# vectordb = load_vector_db(zip_file_path, extract_path)
-
-# # Initialize session state for chat history
-# if "messages" not in st.session_state:
-#     st.session_state.messages = []
-
-# # Display chat history
-# for message in st.session_state.messages:
-#     with st.chat_message(message["role"]):
-#         st.markdown(message["content"])
-
-
-# # User input
-# if prompt := st.chat_input("Enter your query"):
-#     st.session_state.messages.append({"role": "user", "content": prompt})
-#     with st.chat_message("user"):
-#         st.markdown(prompt)
-
-#     with st.chat_message("assistant"):
-#         openai_api_key = st.secrets["OPENAI_API_KEY"]
-#         response = chat_with_openai(prompt, vectordb, openai_api_key)
-#         st.markdown(response)
-
-#         st.session_state.messages.append({"role": "assistant", "content": response})
 
 # # Query input
 # query = st.text_input("Enter your query", "")
 
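For reference, a minimal sketch of how the retrieval-plus-chat flow kept by this commit (augment_prompt followed by chat_with_openai) could be exercised outside Streamlit. This is not part of the commit: it assumes the same legacy LangChain imports the app uses, that the unzipped Chroma store lives at ./chroma_db_extracted (the extract path in the app), and that the store was built with OpenAI embeddings; the query string and API key below are placeholders.

# Reference sketch (not part of the commit): run the simplified RAG flow outside Streamlit.
# Assumptions: the persisted Chroma store is in ./chroma_db_extracted and was built with
# OpenAI embeddings; the API key and query are placeholders.
from langchain.chat_models import ChatOpenAI
from langchain.embeddings import OpenAIEmbeddings
from langchain.vectorstores import Chroma
from langchain.schema import HumanMessage, SystemMessage

openai_api_key = "sk-..."  # placeholder; the app reads this from st.secrets["OPENAI_API_KEY"]

# Load the persisted vector store (the app extracts chroma_db_compressed_.zip here first).
vectordb = Chroma(
    persist_directory="./chroma_db_extracted",
    embedding_function=OpenAIEmbeddings(openai_api_key=openai_api_key),
)

query = "Which companies are hiring for data analyst roles?"

# Mirror augment_prompt(): retrieve context and build the augmented prompt.
results = vectordb.similarity_search(query, k=10)
source_knowledge = "\n".join(doc.page_content for doc in results)
augmented_query = (
    "You are an AI assistant. Use the context provided below to answer the question "
    "as comprehensively as possible.\n\n"
    f"Context:\n{source_knowledge}\n\nQuestion: {query}"
)

# Mirror chat_with_openai(): send the augmented prompt to the chat model.
chat = ChatOpenAI(model_name="gpt-3.5-turbo", openai_api_key=openai_api_key)
messages = [
    SystemMessage(content="You are a helpful assistant."),
    HumanMessage(content=augmented_query),
]
print(chat(messages).content)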