File size: 2,644 Bytes
504b6a7
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
# chatbot/chatbot.py

from flask import Flask, request, jsonify
from langchain.text_splitter import RecursiveCharacterTextSplitter
from sentence_transformers import SentenceTransformer
import chromadb
from chromadb.config import Settings
import openai
import os

# === CONFIG ===
# SECURITY: never commit API keys to source control. The key is read from the
# environment; set GROQ_API_KEY before starting the app.
GROQ_API_KEY = os.environ.get("GROQ_API_KEY", "")
GROQ_MODEL = "llama3-8b-8192"
CHATBOT_TXT_PATH = "./chatbot/chatbot.txt"

# === Setup ===
app = Flask(__name__)
openai.api_key = GROQ_API_KEY
# Groq exposes an OpenAI-compatible endpoint, so the openai client works as-is.
openai.api_base = "https://api.groq.com/openai/v1"

# === Load and split chatbot.txt ===
# Context manager ensures the file handle is closed promptly.
with open(CHATBOT_TXT_PATH, encoding="utf-8") as fh:
    text = fh.read()
splitter = RecursiveCharacterTextSplitter(chunk_size=300, chunk_overlap=100)
docs = [doc.strip() for doc in splitter.split_text(text)]

# === Embed and store in ChromaDB ===
embedder = SentenceTransformer("all-MiniLM-L6-v2")
embeddings = embedder.encode(docs, show_progress_bar=True, batch_size=32)

client = chromadb.Client(Settings(persist_directory="./chatbot/chroma_db", anonymized_telemetry=False))
collection = client.get_or_create_collection("chatbot")
ids = [f"doc_{i}" for i in range(len(docs))]
collection.add(documents=docs, embeddings=embeddings, ids=ids)

# === Core logic ===
def get_response(query: str) -> str:
    """Answer *query* via RAG: retrieve the nearest stored chunks from
    ChromaDB, then ask the Groq-hosted model to reply using that context."""
    # Embed the query and pull the three closest document chunks.
    q_vec = embedder.encode([query])[0]
    hits = collection.query(query_embeddings=[q_vec], n_results=3)
    context = "\n".join(hits['documents'][0])

    system_prompt = (
        "You are a helpful assistant for the Codingo website. "
        "Only answer questions that are directly relevant to the context provided. "
        "If the user asks anything unrelated, politely refuse by saying: "
        "\"I'm only trained to answer questions about the Codingo platform.\""
    )

    # Build the chat payload up front so the API call reads cleanly.
    messages = [
        {"role": "system", "content": system_prompt},
        {"role": "user", "content": f"Context:\n{context}\n\nQuestion: {query}"},
    ]

    completion = openai.ChatCompletion.create(
        model=GROQ_MODEL,
        messages=messages,
        max_tokens=200,
        temperature=0.3,
    )
    return completion['choices'][0]['message']['content'].strip()

# === Flask route ===
@app.route("/chat", methods=["POST"])
def chat():
    """POST /chat — expects JSON {"message": "..."}.

    Returns {"response": "..."} on success, 400 for a missing/empty message,
    and 500 if the backend call raises.
    """
    # request.json raises (or returns None) when the body is absent or not
    # sent as application/json, which would bypass our clean 400 below.
    # get_json(silent=True) never raises; fall back to an empty dict.
    payload = request.get_json(silent=True) or {}
    # str(...) guards against non-string "message" values (e.g. numbers),
    # which would otherwise crash on .strip() outside the try block.
    user_input = str(payload.get("message", "")).strip()
    if not user_input:
        return jsonify({"error": "Empty message"}), 400

    try:
        reply = get_response(user_input)
        return jsonify({"response": reply})
    except Exception as e:
        # Surface the failure as a 500 so the client gets a JSON error body.
        return jsonify({"error": str(e)}), 500

# Entry point: run Flask's built-in development server on localhost:5001
# (debug mode off by default; use a WSGI server in production).
if __name__ == "__main__":
    app.run(port=5001)