# Smart PDF Assistant — ask questions about an uploaded PDF (Gradio + LangChain).
import os
import shutil

import gradio as gr
from langchain.chains import RetrievalQA
from langchain.embeddings import HuggingFaceEmbeddings
from langchain.text_splitter import RecursiveCharacterTextSplitter
from langchain.vectorstores import Chroma
from langchain_community.document_loaders import PyPDFLoader
from langchain_community.llms import HuggingFaceHub
# Folder where uploaded PDFs are stored; created once at import time.
os.makedirs("docs", exist_ok=True)
# Safe copy into the docs folder; skips copying when source == destination.
def safe_copy(src_path, dst_folder="docs"):
    """Copy *src_path* into *dst_folder* and return the destination path.

    The copy is skipped when the resolved source and destination paths are
    identical, which avoids shutil.SameFileError when the uploaded temp file
    already lives inside *dst_folder*.
    """
    filename = os.path.basename(src_path)
    dst_path = os.path.join(dst_folder, filename)
    if os.path.abspath(src_path) != os.path.abspath(dst_path):
        shutil.copy(src_path, dst_path)
    return dst_path
# Load a PDF, split it into chunks, embed them, and build a RetrievalQA chain.
def process_file(file):
    """Build a RetrievalQA chain from an uploaded PDF file object.

    Returns a ``(chain, status_message)`` tuple; on any failure the chain is
    ``None`` and the message describes the error (user-facing, in Arabic).
    """
    try:
        file_path = safe_copy(file.name)
        loader = PyPDFLoader(file_path)
        pages = loader.load_and_split()
        # Overlapping chunks preserve context across split boundaries.
        text_splitter = RecursiveCharacterTextSplitter(chunk_size=1000, chunk_overlap=200)
        texts = text_splitter.split_documents(pages)
        # Multilingual embedding model so non-English (e.g. Arabic) PDFs work.
        embeddings = HuggingFaceEmbeddings(
            model_name="sentence-transformers/paraphrase-multilingual-MiniLM-L12-v2"
        )
        db = Chroma.from_documents(texts, embeddings)
        retriever = db.as_retriever(search_kwargs={"k": 3})
        llm = HuggingFaceHub(
            repo_id="mistralai/Mistral-7B-Instruct-v0.2",
            model_kwargs={"temperature": 0.5, "max_new_tokens": 512},
        )
        qa_chain = RetrievalQA.from_chain_type(
            llm=llm,
            chain_type="stuff",
            retriever=retriever,
            return_source_documents=True,
        )
        return qa_chain, "تم تحميل الملف بنجاح، يمكنك الآن طرح الأسئلة."
    except Exception as e:
        # UI boundary: surface the error message to the user instead of crashing.
        return None, f"حدث خطأ: {e}"
# Global handle to the active QA chain; set by handle_upload after a
# successful upload, None until then.
qa_chain = None

# Answer a question against the currently loaded document.
def answer_question(question):
    """Run *question* through the RetrievalQA chain and format the result.

    Returns a Markdown string containing the answer and the source documents,
    or a prompt (in Arabic) to upload a file first when no chain is loaded.
    """
    if qa_chain is None:
        return "الرجاء رفع ملف أولاً."
    result = qa_chain({"query": question})
    answer = result["result"]
    sources = "\n".join(doc.metadata.get("source", "") for doc in result["source_documents"])
    return f"🔹 **الإجابة:**\n{answer}\n\n🔹 **المصدر:**\n{sources}"
# Gradio UI: upload a PDF, process it into a QA chain, then ask questions.
# NOTE(review): gr.File(type="file") was removed in Gradio 4 — pin gradio<4
# or migrate to type="filepath" (which passes a str, not an object with .name).
with gr.Blocks(title="Smart PDF Assistant") as demo:
    gr.Markdown("## 🤖 مساعد الكتب الذكي - Smart PDF Assistant")
    file_input = gr.File(label="📄 ارفع ملف PDF", type="file")
    upload_button = gr.Button("🔁 تحميل الملف ومعالجته")
    status_output = gr.Textbox(label="📢 حالة التحميل", interactive=False)
    question_input = gr.Textbox(label="❓ اكتب سؤالك هنا")
    answer_output = gr.Markdown(label="📘 الإجابة")

    def handle_upload(file):
        """Process the uploaded file and store the resulting chain globally."""
        global qa_chain
        qa_chain, status = process_file(file)
        return status

    upload_button.click(fn=handle_upload, inputs=[file_input], outputs=[status_output])
    # Pressing Enter in the question box triggers the answer.
    question_input.submit(fn=answer_question, inputs=[question_input], outputs=[answer_output])
# Launch the web app only when executed as a script.
if __name__ == "__main__":
    demo.launch()