Update app.py
app.py CHANGED
@@ -1,12 +1,51 @@
import gradio as gr

-
-
-except ImportError:
-    from langchain.document_loaders import PyPDFLoader

-def
-

-
-
+import os
+import tempfile
+import shutil
+import chromadb
+from langchain_community.document_loaders import PyPDFLoader
+from langchain_community.vectorstores import Chroma
+from langchain_community.embeddings import HuggingFaceEmbeddings
+from langchain.chains import RetrievalQA
+from langchain_community.llms import HuggingFaceHub
import gradio as gr

+DB_DIR = "chroma_db"
+os.makedirs(DB_DIR, exist_ok=True)

+def load_and_index_pdf(pdf_file):
+    with tempfile.TemporaryDirectory() as tmpdir:
+        pdf_path = os.path.join(tmpdir, os.path.basename(pdf_file.name))
+        shutil.copy(pdf_file.name, pdf_path)
+        loader = PyPDFLoader(pdf_path)
+        documents = loader.load_and_split()
+        embeddings = HuggingFaceEmbeddings(model_name="sentence-transformers/all-MiniLM-L6-v2")
+        vectordb = Chroma.from_documents(documents, embedding=embeddings, persist_directory=DB_DIR)
+        vectordb.persist()
+        return "✅ PDF processed successfully! You can now ask questions."

+def answer_question(question):
+    if not os.path.exists(DB_DIR) or not os.listdir(DB_DIR):
+        return "❌ Please upload a PDF file first."
+    embeddings = HuggingFaceEmbeddings(model_name="sentence-transformers/all-MiniLM-L6-v2")
+    vectordb = Chroma(persist_directory=DB_DIR, embedding_function=embeddings)
+    retriever = vectordb.as_retriever()
+    llm = HuggingFaceHub(repo_id="mistralai/Mistral-7B-Instruct-v0.2", model_kwargs={"temperature": 0.5, "max_new_tokens": 512})
+    qa = RetrievalQA.from_chain_type(llm=llm, chain_type="stuff", retriever=retriever)
+    return qa.run(question)
+
+with gr.Blocks() as demo:
+    gr.Markdown("## 🤖 Smart PDF Assistant - your smart assistant for reading and understanding PDF files")
+
+    with gr.Tab("📁 Upload PDF"):
+        pdf_input = gr.File(label="Upload a PDF file", file_types=[".pdf"])
+        upload_output = gr.Textbox(label="Processing status")
+        upload_btn = gr.Button("📄 Process file")
+        upload_btn.click(fn=load_and_index_pdf, inputs=pdf_input, outputs=upload_output)
+
+    with gr.Tab("❓ Ask your question"):
+        question = gr.Textbox(label="Type your question here")
+        answer = gr.Textbox(label="Answer", lines=5)
+        ask_btn = gr.Button("🔍 Send question")
+        ask_btn.click(fn=answer_question, inputs=question, outputs=answer)
+
+demo.launch()
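
For context on what the new pipeline does, here is a minimal sketch (not part of the commit) that queries the persisted Chroma index directly, the same way answer_question does before handing documents to the RetrievalQA chain. It assumes chroma_db/ has already been populated by load_and_index_pdf; the sample question and k=4 are arbitrary, and it skips the HuggingFaceHub call, which would additionally need a HUGGINGFACEHUB_API_TOKEN in the environment.

from langchain_community.embeddings import HuggingFaceEmbeddings
from langchain_community.vectorstores import Chroma

# Reopen the index that load_and_index_pdf persisted to chroma_db/.
embeddings = HuggingFaceEmbeddings(model_name="sentence-transformers/all-MiniLM-L6-v2")
vectordb = Chroma(persist_directory="chroma_db", embedding_function=embeddings)

# Fetch the chunks the retriever would stuff into the prompt.
docs = vectordb.as_retriever(search_kwargs={"k": 4}).get_relevant_documents(
    "What is this document about?"
)
for doc in docs:
    print(doc.metadata.get("page"), doc.page_content[:120])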