Spaces:
Paused
Paused
Commit
·
a3c2881
1
Parent(s):
9ee49ff
qdrant loaded
Browse files
backend/services/interview_engine.py
CHANGED
@@ -11,9 +11,24 @@ import torch
|
|
11 |
from backend.services.interview_retrieval import (
|
12 |
extract_all_roles_from_qdrant,
|
13 |
retrieve_interview_data,
|
14 |
-
random_context_chunks
|
|
|
|
|
15 |
)
|
16 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
17 |
|
18 |
if torch.cuda.is_available():
|
19 |
print("🔥 CUDA Available")
|
@@ -103,9 +118,18 @@ load_whisper_model()
|
|
103 |
def generate_first_question(profile, job):
|
104 |
"""Generate the first interview question based on profile and job"""
|
105 |
all_roles = extract_all_roles_from_qdrant()
|
|
|
|
|
106 |
retrieved_data = retrieve_interview_data(job.role.lower(), all_roles)
|
107 |
-
|
|
|
|
|
|
|
|
|
|
|
108 |
|
|
|
|
|
109 |
try:
|
110 |
prompt = f"""
|
111 |
You are conducting an interview for a {job.role} position at {job.company}.
|
|
|
11 |
from backend.services.interview_retrieval import (
|
12 |
extract_all_roles_from_qdrant,
|
13 |
retrieve_interview_data,
|
14 |
+
random_context_chunks,
|
15 |
+
get_role_questions,  # 🔍 For sample questions
|
16 |
+
qdrant_client  # 🔍 For collection info
|
17 |
)
|
18 |
|
19 |
+
try:
|
20 |
+
print("🔍 Qdrant Collections:", qdrant_client.get_collections())
|
21 |
+
info = qdrant_client.get_collection("interview_questions")
|
22 |
+
print("✅ Vector size:", info.config.params.vectors.size)
|
23 |
+
print("✅ Distance metric:", info.config.params.vectors.distance)
|
24 |
+
|
25 |
+
all_roles_debug = extract_all_roles_from_qdrant()
|
26 |
+
print(f"✅ Found {len(all_roles_debug)} roles:", all_roles_debug)
|
27 |
+
if all_roles_debug:
|
28 |
+
sample_questions_debug = get_role_questions(all_roles_debug[0])
|
29 |
+
print(f"✅ Sample questions for '{all_roles_debug[0]}': {len(sample_questions_debug)} found")
|
30 |
+
except Exception as e:
|
31 |
+
print("⚠️ Qdrant check failed:", e)
|
32 |
|
33 |
if torch.cuda.is_available():
|
34 |
print("🔥 CUDA Available")
|
|
|
118 |
def generate_first_question(profile, job):
|
119 |
"""Generate the first interview question based on profile and job"""
|
120 |
all_roles = extract_all_roles_from_qdrant()
|
121 |
+
logging.info(f"[QDRANT DEBUG] Available Roles: {all_roles}")
|
122 |
+
|
123 |
retrieved_data = retrieve_interview_data(job.role.lower(), all_roles)
|
124 |
+
logging.info(f"[QDRANT DEBUG] Role requested: {job.role.lower()}")
|
125 |
+
logging.info(f"[QDRANT DEBUG] Questions retrieved: {len(retrieved_data)}")
|
126 |
+
if retrieved_data:
|
127 |
+
logging.info(f"[QDRANT DEBUG] Sample Q: {retrieved_data[0]['question']}")
|
128 |
+
else:
|
129 |
+
logging.warning("[QDRANT DEBUG] No questions retrieved, falling back to defaults")
|
130 |
|
131 |
+
context_data = random_context_chunks(retrieved_data, k=4) if retrieved_data else ""
|
132 |
+
|
133 |
try:
|
134 |
prompt = f"""
|
135 |
You are conducting an interview for a {job.role} position at {job.company}.
|