awacke1 committed
Commit 2db9a0a · verified · 1 Parent(s): bb51012

Update app.py

Files changed (1)
  1. app.py +4 -2
app.py CHANGED
@@ -243,7 +243,7 @@ def search_glossary(query):
             "mistralai/Mixtral-8x7B-Instruct-v0.1", # LLM Model Dropdown component
             api_name="/update_with_rag_md"
         )
-        st.code(response1[0], language="python", *, line_numbers=True, wrap_lines=False)
+        st.code(response1[0], language="python", line_numbers=True, wrap_lines=False)
 
 
         # ArXiv searcher - Paper Summary & Ask LLM
@@ -256,13 +256,15 @@ def search_glossary(query):
             llm_model_picked=model_choice,
             api_name="/update_with_rag_md"
         )
-
+        st.code(response1, language="python", line_numbers=True, wrap_lines=False)
+
         response2 = client.predict(
             prompt=query,
             llm_model_picked=model_choice,
             stream_outputs=True,
             api_name="/ask_llm"
         )
+        st.code(response2, language="python", line_numbers=True, wrap_lines=False)
 
         # Aggregate hyperlinks and show with emojis
         hyperlinks = extract_hyperlinks([response1, response2])
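
Note: below is a minimal sketch of the corrected display calls, assuming a Streamlit release recent enough to support the line_numbers and wrap_lines keyword arguments of st.code. The response strings are hypothetical stand-ins for the gradio_client results returned by the /update_with_rag_md and /ask_llm endpoints, not values taken from this repo. The stray "*" removed by this commit appears to have been copied from the documented st.code signature, where it only marks the following parameters as keyword-only; inside a call expression a bare "*" is a SyntaxError.

    import streamlit as st

    # Hypothetical stand-ins for the Gradio client outputs (not from app.py):
    #   response1 ~ client.predict(..., api_name="/update_with_rag_md")
    #   response2 ~ client.predict(..., api_name="/ask_llm")
    response1 = "## Paper Summary -- RAG markdown built from the ArXiv results"
    response2 = "LLM answer streamed back for the user's query"

    # Corrected calls: keyword arguments only, no bare "*" in the call.
    st.code(response1, language="python", line_numbers=True, wrap_lines=False)
    st.code(response2, language="python", line_numbers=True, wrap_lines=False)

With wrap_lines=False, long response lines should scroll horizontally rather than wrap, and line_numbers=True adds a line-number gutter, which helps when the responses contain code.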