tomas.helmfridsson committed on
Commit
487fc40
Β·
1 Parent(s): 1b048ee

update guis 12

Browse files
Files changed (1) hide show
  1. app.py +45 -39
app.py CHANGED
@@ -7,51 +7,57 @@ from langchain_huggingface.embeddings import HuggingFaceEmbeddings
7
  from langchain_huggingface.llms import HuggingFacePipeline
8
  from langchain.chains import RetrievalQA
9
 
10
- def load_vectorstore():
11
- docs, files = [], []
12
- for fn in os.listdir("document"):
13
- if fn.lower().endswith(".pdf"):
14
- docs.extend(PyPDFLoader(os.path.join("document", fn)).load_and_split())
15
- files.append(fn)
16
- emb = HuggingFaceEmbeddings(model_name="KBLab/sentence-bert-swedish-cased")
17
- vs = FAISS.from_documents(docs, emb)
18
- return vs, files
19
-
20
- # Bygg index + modell
21
- vectorstore, file_list = load_vectorstore()
22
  pipe = pipeline("text-generation", model="tiiuae/falcon-rw-1b", device=-1)
23
- llm = HuggingFacePipeline(pipeline=pipe, model_kwargs={"temperature":0.3,"max_new_tokens":512})
24
- qa = RetrievalQA.from_chain_type(llm=llm, retriever=vectorstore.as_retriever())
25
-
26
- # Gradio-UI
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
27
  with gr.Blocks() as demo:
28
- status = gr.Markdown("πŸ”„ Laddar…", elem_id="status")
29
- status.visible = False
30
-
31
- gr.Markdown("**βœ… Klart!** FrΓ₯ga om PDF-filerna:\n\n" + "\n".join(f"- {f}" for f in file_list))
 
32
 
33
  with gr.Row():
34
- txt = gr.Textbox(label="Din frΓ₯ga:")
35
- temp = gr.Slider(0,1,value=0.3,step=0.05,label="Temperatur")
36
- send = gr.Button("Skicka")
37
 
38
  chatbot = gr.Chatbot()
39
 
40
- def chat_fn(message, temperature, history):
41
- if not message:
42
- return history
43
- if len(message)>1000:
44
- reply = f"⚠️ FrΓ₯gan Γ€r fΓΆr lΓ₯ng ({len(message)} tecken)."
45
- else:
46
- llm.model_kwargs["temperature"] = temperature
47
- try:
48
- reply = qa.invoke({"query":message})["result"]
49
- except Exception as e:
50
- reply = f"Ett fel uppstod: {e}"
51
- history = history or []
52
- history.append((message, reply))
53
- return history
54
-
55
- send.click(fn=chat_fn, inputs=[txt, temp, chatbot], outputs=chatbot)
56
 
57
  demo.launch()
 
7
  from langchain_huggingface.llms import HuggingFacePipeline
8
  from langchain.chains import RetrievalQA
9
 
10
# ─── 1) Load and index the PDFs ────────────────────────────────
docs, files = [], []
# sorted() makes the index (and the file list shown in the UI) deterministic
# across runs — os.listdir() returns entries in arbitrary order.
for fn in sorted(os.listdir("document")):
    if fn.lower().endswith(".pdf"):
        loader = PyPDFLoader(os.path.join("document", fn))
        docs.extend(loader.load_and_split())
        files.append(fn)
if not docs:
    # Fail early with a clear message instead of a cryptic FAISS error
    # when the directory contains no PDF files.
    raise RuntimeError("No PDF files found in the 'document' directory.")
emb = HuggingFaceEmbeddings(model_name="KBLab/sentence-bert-swedish-cased")
vs = FAISS.from_documents(docs, emb)

# ─── 2) Initialise the LLM & RAG chain ─────────────────────────
pipe = pipeline("text-generation", model="tiiuae/falcon-rw-1b", device=-1)
# Default generation settings; chat_fn overwrites "temperature" per request.
generation_defaults = {"temperature": 0.3, "max_new_tokens": 512}
llm = HuggingFacePipeline(pipeline=pipe, model_kwargs=generation_defaults)
qa = RetrievalQA.from_chain_type(llm=llm, retriever=vs.as_retriever())

# ─── 3) Chat function ──────────────────────────────────────────
def chat_fn(message, temp, history):
    """Answer *message* via the RAG chain and append the turn to *history*.

    Parameters:
        message: the user's question from the textbox (str).
        temp: sampling temperature from the UI slider (float, 0.0-1.0).
        history: list of (question, answer) tuples for gr.Chatbot, or None
            on the very first call.

    Returns the updated history list; Gradio re-renders the Chatbot from it.
    """
    history = history or []
    # Guard restored from the previous revision: ignore empty/whitespace-only
    # submissions instead of sending an empty query to the chain.
    if not message or not message.strip():
        return history
    # Overly long questions get a warning instead of a model call.
    if len(message) > 1000:
        history.append((message, f"⚠️ Din fråga är för lång ({len(message)} tecken)."))
        return history
    # NOTE(review): mutating model_kwargs after the HF pipeline has been built
    # may not affect generation — confirm the slider actually changes sampling.
    llm.model_kwargs["temperature"] = temp
    try:
        answer = qa.invoke({"query": message})["result"]
    except Exception as e:
        # Surface chain errors in the chat window instead of crashing the UI.
        answer = f"Ett fel uppstod: {e}"
    history.append((message, answer))
    return history

# ─── 4) Build the Gradio UI ────────────────────────────────────
with gr.Blocks() as demo:
    gr.Markdown("## 🌟 Dokumentassistent (Svenska)")
    # One bullet per indexed PDF so the user sees what can be asked about.
    loaded_files_md = "**✅ Laddade PDF-filer:**\n\n" + "\n".join(f"- {f}" for f in files)
    gr.Markdown(loaded_files_md)

    with gr.Row():
        txt = gr.Textbox(label="Din fråga:", placeholder="Ex: Vad handlar dokumentet om?")
        temp = gr.Slider(0.0, 1.0, value=0.3, step=0.05, label="Temperatur")
        send = gr.Button("Skicka")

    chatbot = gr.Chatbot()

    # Wire the button: question + temperature + current history in, new history out.
    send.click(fn=chat_fn, inputs=[txt, temp, chatbot], outputs=chatbot)

demo.launch()