Upload 6 files
- .gitattributes +2 -0
- README.md +16 -0
- app.py +33 -0
- faiss_index/documents.pkl +3 -0
- faiss_index/faiss_index.faiss +3 -0
- rag_utils.py +46 -0
- requirements.txt +5 -0
.gitattributes
CHANGED
@@ -1,2 +1,4 @@
 edu_pilot_gradio_space_final/faiss_index/documents.pkl filter=lfs diff=lfs merge=lfs -text
 edu_pilot_gradio_space_final/faiss_index/faiss_index.faiss filter=lfs diff=lfs merge=lfs -text
+faiss_index/documents.pkl filter=lfs diff=lfs merge=lfs -text
+faiss_index/faiss_index.faiss filter=lfs diff=lfs merge=lfs -text
README.md
ADDED
@@ -0,0 +1,16 @@
---
title: EduPilot
emoji: 🎓
colorFrom: blue
colorTo: yellow
sdk: gradio
sdk_version: "4.20.0"
app_file: app.py
pinned: false
---

# EduPilot – AI Guidance Counselor Chatbot

Welcome! This chatbot is an intelligent AI guidance counselor, trained to answer questions about studies, academic tracks, and careers.

Ask it anything about your academic future 🤖🎓
app.py
ADDED
@@ -0,0 +1,33 @@
import gradio as gr
from rag_utils import load_faiss_index, get_embedding_model, query_index, nettoyer_context, generate_answer

# Load the FAISS index and the embedding model once at startup
index, documents = load_faiss_index()
embedder = get_embedding_model()

# Answer function with error handling
def ask_edu_pilot(message, history):
    try:
        context = query_index(message, index, documents, embedder)
        cleaned_context = nettoyer_context("\n".join(context))
        answer = generate_answer(message, cleaned_context)
        return answer
    except Exception as e:
        print("Error while calling the model:", e)
        return "😓 Le conseiller IA est temporairement indisponible. Merci de réessayer plus tard."

# Welcome message shown in the chat
welcome_message = (
    "👋 Bonjour ! Je suis **EduPilot**, ton conseiller IA.\n\n"
    "Pose-moi une question sur ton avenir scolaire, les filières, les écoles ou les métiers qui t'intéressent. 🎓"
)

# Styled Gradio interface
gr.ChatInterface(
    fn=ask_edu_pilot,
    chatbot=gr.Chatbot(label="🎓 EduPilot - Conseiller IA", bubble_full_width=False, show_copy_button=True),
    textbox=gr.Textbox(placeholder="Exemple : Que faire après un bac pro ?", container=True, scale=7),
    title="🎓 EduPilot - Chatbot d'Orientation Scolaire",
    theme=gr.themes.Soft(primary_hue="blue", secondary_hue="yellow"),
    description=welcome_message,
).launch()
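For a quick local smoke test before pushing to the Space, the same pipeline that app.py wires into the chat callback can be exercised directly, without launching the Gradio UI. This is a sketch, not part of the commit: it assumes the LFS index files have been pulled and that a valid Hugging Face token is exported in the `edup` environment variable.

# smoke_test.py — hypothetical helper, not included in this commit
from rag_utils import load_faiss_index, get_embedding_model, query_index, nettoyer_context, generate_answer

index, documents = load_faiss_index()
embedder = get_embedding_model()

question = "Que faire après un bac pro ?"
context = query_index(question, index, documents, embedder)
print(generate_answer(question, nettoyer_context("\n".join(context))))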
faiss_index/documents.pkl
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:4cc6100b51468166d2e0b5e0ca119f239e648cb5d539dd256dc886ef39f45f46
size 36366182
faiss_index/faiss_index.faiss
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:43efadcf8c063cedf2414c75df1dd801404c0ba263b0e35de8f21c25436d0694
size 165167661
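Both index artifacts are stored through Git LFS, so only the pointer files appear in the diff. Below is a quick consistency check to run after `git lfs pull`, sketched under the assumption (implied by `query_index` in rag_utils.py) that documents.pkl holds a plain list of text chunks aligned with the index rows.

# check_index.py — illustrative only, not part of this commit
import pickle
import faiss

index = faiss.read_index("faiss_index/faiss_index.faiss")
with open("faiss_index/documents.pkl", "rb") as f:
    documents = pickle.load(f)

# Entry count should match the number of stored chunks; index.d is the embedding dimension.
print(index.ntotal, len(documents), index.d)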
rag_utils.py
ADDED
@@ -0,0 +1,46 @@
import os
import pickle
import re

import faiss
import numpy as np
from huggingface_hub import InferenceClient
from sentence_transformers import SentenceTransformer

def load_faiss_index(index_path="faiss_index/faiss_index.faiss", doc_path="faiss_index/documents.pkl"):
    # Load the prebuilt FAISS index and the pickled documents it was built from
    index = faiss.read_index(index_path)
    with open(doc_path, "rb") as f:
        documents = pickle.load(f)
    return index, documents

def get_embedding_model():
    # Encodes incoming questions; must match the model used to build the index
    return SentenceTransformer("sentence-transformers/multi-qa-MiniLM-L6-cos-v1")

def query_index(question, index, documents, model, k=3):
    # Embed the question and return the k nearest documents from the index
    question_embedding = model.encode([question])
    _, indices = index.search(np.array(question_embedding).astype("float32"), k)
    return [documents[i] for i in indices[0]]

def nettoyer_context(context):
    # Strip list-literal artifacts (['...']) and stray "None" values from the retrieved text
    context = re.sub(r"\[\'(.*?)\'\]", r"\1", context)
    context = context.replace("None", "")
    return context

# Hosted inference client; the HF token is read from the "edup" Space secret
client = InferenceClient("mistralai/Mistral-7B-Instruct-v0.1", token=os.environ.get("edup"))

def generate_answer(question, context):
    prompt = f"""Voici des informations sur des établissements et formations :

{context}

Formule ta réponse comme un conseiller d’orientation bienveillant, de manière fluide et naturelle.

Question : {question}
Réponse :"""

    response = client.text_generation(prompt, max_new_tokens=300)
    return response
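The commit ships only the prebuilt index; the script that produced it is not included. The following is a minimal sketch of how an equivalent index could be built with the same embedding model. The source passages (`chunks`) and the choice of an L2 flat index are assumptions, not a reconstruction of the original build step.

# build_index.py — illustrative sketch, not part of this commit
import pickle
import faiss
from sentence_transformers import SentenceTransformer

chunks = ["…"]  # list of text passages about schools, programmes and careers (assumed input)

model = SentenceTransformer("sentence-transformers/multi-qa-MiniLM-L6-cos-v1")
embeddings = model.encode(chunks, convert_to_numpy=True).astype("float32")

index = faiss.IndexFlatL2(embeddings.shape[1])  # metric is an assumption; the shipped index may differ
index.add(embeddings)

faiss.write_index(index, "faiss_index/faiss_index.faiss")
with open("faiss_index/documents.pkl", "wb") as f:
    pickle.dump(chunks, f)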
requirements.txt
ADDED
@@ -0,0 +1,5 @@
gradio
sentence-transformers
faiss-cpu
numpy
requests