farmax committed on
Commit
97342c7
·
verified ·
1 Parent(s): 6a0a548

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +4 -18
app.py CHANGED
@@ -225,28 +225,14 @@ def initialize_database(list_file_obj, chunk_size, chunk_overlap, progress=gr.Pr
225
  progress(0.9, desc="Done!")
226
  return vector_db, collection_name, "Complete!"
227
 
228
- def initialize_LLM(llm_option, llm_temperature, max_tokens, top_k, vector_db, progress=gr.Progress(), language=None):
229
- # Definiamo le lingue supportate
230
- supported_languages = ['it', 'en'] # Italiano e inglese
231
-
232
- # Se la lingua non è specificata, usiamo l'italiano come default
233
- if language is None:
234
- language = 'it'
235
-
236
- # Verifichiamo se la lingua è supportata
237
- if language not in supported_languages:
238
- raise ValueError(f"Lingua non supportata. Supportati: {supported_languages}")
239
-
240
- # Convertiamo la lingua in minuscolo per la compatibilità con il modello
241
- language = language.lower()
242
-
243
  llm_name = list_llm[llm_option]
244
  print(f"Nome del modello: {llm_name}")
245
 
246
- # Creiamo il chain LLM con la lingua specificata
247
- qa_chain = initialize_llmchain(llm_name, llm_temperature, max_tokens, top_k, vector_db, progress, language)
248
 
249
- return qa_chain, "Complete!"
250
 
251
  def format_chat_history(message, chat_history):
252
  formatted_chat_history = []
 
225
  progress(0.9, desc="Done!")
226
  return vector_db, collection_name, "Complete!"
227
 
228
+ def initialize_LLM(llm_option, llm_temperature, max_tokens, top_k, vector_db, progress=gr.Progress()):
229
+ # print("llm_option",llm_option)
 
 
 
 
 
 
 
 
 
 
 
 
 
230
  llm_name = list_llm[llm_option]
231
  print(f"Nome del modello: {llm_name}")
232
 
233
+ qa_chain = initialize_llmchain(llm_name, llm_temperature, max_tokens, top_k, vector_db, progress)
 
234
 
235
+ return qa_chain, "Completato!"
236
 
237
  def format_chat_history(message, chat_history):
238
  formatted_chat_history = []