import os
import requests

# Read the API key from the environment; never commit a real key to source control.
GROQ_API_KEY = os.getenv("GROQ_API_KEY")

def summarize_match(job_description, cv_names, cv_snippets):
    if not GROQ_API_KEY:
        return "❌ GROQ_API_KEY not set."

    try:
        # Limit content length per CV to avoid token overflow
        cv_snippets = [text.strip()[:1500] or "[No content]" for text in cv_snippets[:3]]
        cv_names = [name[:60] for name in cv_names[:3]]
        # Pad to exactly three entries so the fixed prompt slots below never raise IndexError
        cv_snippets += ["[No content]"] * (3 - len(cv_snippets))
        cv_names += ["[Missing CV]"] * (3 - len(cv_names))

        # Create structured prompt
        prompt = f"""
You are an AI recruitment assistant helping to match candidates to job descriptions.

### Job Description:
{job_description}

### Candidate CVs:
1. {cv_names[0]}:
{cv_snippets[0]}

2. {cv_names[1]}:
{cv_snippets[1]}

3. {cv_names[2]}:
{cv_snippets[2]}

Analyze how well each candidate matches the job requirements, especially in terms of:
- PHP programming
- Software or web development
- Relevant technical experience

Clearly identify which candidates are suitable and why.
""".strip()

        # Debug info (optional)
        print("πŸ“¦ Prompt length:", len(prompt))
        if len(prompt) > 8000:
            return "❌ Prompt too long. Please shorten the CVs or JD."

        # Groq API call
        response = requests.post(
            url="https://api.groq.com/openai/v1/chat/completions",
            headers={
                "Authorization": f"Bearer {GROQ_API_KEY}",
                "Content-Type": "application/json"
            },
            json={
                "model": "mixtral-8x7b-32768",  # Or change to a different supported Groq model
                "messages": [{"role": "user", "content": prompt}],
                "temperature": 0.4
            },
            timeout=30
        )

        response.raise_for_status()
        return response.json()["choices"][0]["message"]["content"]

    except Exception as e:
        return f"❌ Groq API error: {e}"