Shreyas094 committed on
Commit
6f866bb
·
verified ·
1 Parent(s): daa35b6

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +13 -13
app.py CHANGED
@@ -78,7 +78,7 @@ def update_vectors(files, parser):
78
 
79
  return f"Vector store updated successfully. Processed {total_chunks} chunks from {len(files)} files using {parser}."
80
 
81
- def generate_chunked_response(prompt, model, max_tokens=1000, num_calls=3, temperature=0.2, stop_clicked=False):
82
  print(f"Starting generate_chunked_response with {num_calls} calls")
83
  client = InferenceClient(model, token=huggingface_token)
84
  full_responses = []
@@ -86,7 +86,7 @@ def generate_chunked_response(prompt, model, max_tokens=1000, num_calls=3, tempe
86
 
87
  for i in range(num_calls):
88
  print(f"Starting API call {i+1}")
89
- if stop_clicked(): # Assume stop_clicked is now a function that returns the current state
90
  print("Stop clicked, breaking loop")
91
  break
92
  try:
@@ -97,7 +97,7 @@ def generate_chunked_response(prompt, model, max_tokens=1000, num_calls=3, tempe
97
  temperature=temperature,
98
  stream=True,
99
  ):
100
- if stop_clicked():
101
  print("Stop clicked during streaming, breaking")
102
  break
103
  if message.choices and message.choices[0].delta and message.choices[0].delta.content:
@@ -129,7 +129,7 @@ class CitingSources(BaseModel):
129
  description="List of sources to cite. Should be an URL of the source."
130
  )
131
 
132
- def get_response_with_search(query, model, num_calls=3, temperature=0.2):
133
  search_results = duckduckgo_search(query)
134
  context = "\n".join(f"{result['title']}\n{result['body']}\nSource: {result['href']}\n"
135
  for result in search_results if 'body' in result)
@@ -139,8 +139,8 @@ def get_response_with_search(query, model, num_calls=3, temperature=0.2):
139
  Write a detailed and complete research document that fulfills the following user request: '{query}'
140
  After writing the document, please provide a list of sources used in your response. [/INST]"""
141
 
142
- generated_text = generate_chunked_response(prompt, model, num_calls=num_calls, temperature=temperature)
143
-
144
  # Clean the response
145
  clean_text = re.sub(r'<s>\[INST\].*?\[/INST\]\s*', '', generated_text, flags=re.DOTALL)
146
  clean_text = clean_text.replace("Using the following context:", "").strip()
@@ -152,7 +152,7 @@ After writing the document, please provide a list of sources used in your respon
152
 
153
  return main_content, sources
154
 
155
- def get_response_from_pdf(query, model, num_calls=3, temperature=0.2):
156
  embed = get_embeddings()
157
  if os.path.exists("faiss_database"):
158
  database = FAISS.load_local("faiss_database", embed, allow_dangerous_deserialization=True)
@@ -168,7 +168,7 @@ def get_response_from_pdf(query, model, num_calls=3, temperature=0.2):
168
  Write a detailed and complete response that answers the following user question: '{query}'
169
  Do not include a list of sources in your response. [/INST]"""
170
 
171
- generated_text = generate_chunked_response(prompt, model, num_calls=num_calls, temperature=temperature)
172
 
173
  # Clean the response
174
  clean_text = re.sub(r'<s>\[INST\].*?\[/INST\]\s*', '', generated_text, flags=re.DOTALL)
@@ -256,22 +256,22 @@ with gr.Blocks() as demo:
256
  print("Already generating, returning")
257
  return message, history, is_generating, stop_clicked
258
  is_generating = True
259
- stop_clicked = False # Reset stop_clicked at the start of generation
260
 
261
  try:
262
  print(f"Generating response for: {message}")
263
  if use_web_search:
264
  print("Using web search")
265
- main_content, sources = get_response_with_search(message, model, num_calls=num_calls, temperature=temperature)
266
  formatted_response = f"{main_content}\n\nSources:\n{sources}"
267
  else:
268
  print("Using PDF search")
269
- response = get_response_from_pdf(message, model, num_calls=num_calls, temperature=temperature)
270
  formatted_response = response
271
 
272
  print(f"Generated response: {formatted_response[:100]}...")
273
 
274
- if not stop_clicked():
275
  print("Appending to history")
276
  history.append((message, formatted_response))
277
  else:
@@ -291,7 +291,7 @@ with gr.Blocks() as demo:
291
  )
292
  print(f"New history has {len(new_history)} items")
293
  return "", new_history, new_is_generating, new_stop_clicked
294
-
295
  submit_btn.click(
296
  on_submit,
297
  inputs=[msg, chatbot, use_web_search, model_dropdown, temperature_slider, num_calls_slider, is_generating, stop_clicked],
 
78
 
79
  return f"Vector store updated successfully. Processed {total_chunks} chunks from {len(files)} files using {parser}."
80
 
81
+ def generate_chunked_response(prompt, model, max_tokens=1000, num_calls=3, temperature=0.2, stop_clicked=None):
82
  print(f"Starting generate_chunked_response with {num_calls} calls")
83
  client = InferenceClient(model, token=huggingface_token)
84
  full_responses = []
 
86
 
87
  for i in range(num_calls):
88
  print(f"Starting API call {i+1}")
89
+ if stop_clicked and stop_clicked.value: # Check if stop_clicked is not None and its value is True
90
  print("Stop clicked, breaking loop")
91
  break
92
  try:
 
97
  temperature=temperature,
98
  stream=True,
99
  ):
100
+ if stop_clicked and stop_clicked.value:
101
  print("Stop clicked during streaming, breaking")
102
  break
103
  if message.choices and message.choices[0].delta and message.choices[0].delta.content:
 
129
  description="List of sources to cite. Should be an URL of the source."
130
  )
131
 
132
+ def get_response_with_search(query, model, num_calls=3, temperature=0.2, stop_clicked=None):
133
  search_results = duckduckgo_search(query)
134
  context = "\n".join(f"{result['title']}\n{result['body']}\nSource: {result['href']}\n"
135
  for result in search_results if 'body' in result)
 
139
  Write a detailed and complete research document that fulfills the following user request: '{query}'
140
  After writing the document, please provide a list of sources used in your response. [/INST]"""
141
 
142
+ generated_text = generate_chunked_response(prompt, model, num_calls=num_calls, temperature=temperature, stop_clicked=stop_clicked)
143
+
144
  # Clean the response
145
  clean_text = re.sub(r'<s>\[INST\].*?\[/INST\]\s*', '', generated_text, flags=re.DOTALL)
146
  clean_text = clean_text.replace("Using the following context:", "").strip()
 
152
 
153
  return main_content, sources
154
 
155
+ def get_response_from_pdf(query, model, num_calls=3, temperature=0.2, stop_clicked=None):
156
  embed = get_embeddings()
157
  if os.path.exists("faiss_database"):
158
  database = FAISS.load_local("faiss_database", embed, allow_dangerous_deserialization=True)
 
168
  Write a detailed and complete response that answers the following user question: '{query}'
169
  Do not include a list of sources in your response. [/INST]"""
170
 
171
+ generated_text = generate_chunked_response(prompt, model, num_calls=num_calls, temperature=temperature, stop_clicked=stop_clicked)
172
 
173
  # Clean the response
174
  clean_text = re.sub(r'<s>\[INST\].*?\[/INST\]\s*', '', generated_text, flags=re.DOTALL)
 
256
  print("Already generating, returning")
257
  return message, history, is_generating, stop_clicked
258
  is_generating = True
259
+ stop_clicked.value = False # Reset stop_clicked at the start of generation
260
 
261
  try:
262
  print(f"Generating response for: {message}")
263
  if use_web_search:
264
  print("Using web search")
265
+ main_content, sources = get_response_with_search(query, model, num_calls=num_calls, temperature=temperature)
266
  formatted_response = f"{main_content}\n\nSources:\n{sources}"
267
  else:
268
  print("Using PDF search")
269
+ response = get_response_from_pdf(query, model, num_calls=num_calls, temperature=temperature)
270
  formatted_response = response
271
 
272
  print(f"Generated response: {formatted_response[:100]}...")
273
 
274
+ if not stop_clicked.value:
275
  print("Appending to history")
276
  history.append((message, formatted_response))
277
  else:
 
291
  )
292
  print(f"New history has {len(new_history)} items")
293
  return "", new_history, new_is_generating, new_stop_clicked
294
+
295
  submit_btn.click(
296
  on_submit,
297
  inputs=[msg, chatbot, use_web_search, model_dropdown, temperature_slider, num_calls_slider, is_generating, stop_clicked],