from time import sleep
import gradio as gr
from transformers import AutoModelForCausalLM, AutoTokenizer
# from smolagents import ToolCallingAgent
import torch
from duckduckgo_search import DDGS
# Load the SmolLM model and tokenizer
model_name = "HuggingFaceTB/SmolLM2-360M-Instruct"
model = AutoModelForCausalLM.from_pretrained(model_name)
tokenizer = AutoTokenizer.from_pretrained(model_name)
device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
model.to(device)
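# Optional (assumption about the runtime, not part of the original setup): on a GPU-backed
# Space the model could be loaded in half precision to reduce memory, e.g.
#   model = AutoModelForCausalLM.from_pretrained(model_name, torch_dtype=torch.bfloat16)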
class Role:
    def __init__(self, company_name, company_url, job_description, clean_job_description, job_title):
        self.company_name = company_name
        self.company_url = company_url
        self.job_description = job_description
        self.clean_job_description = clean_job_description
        self.job_title = job_title

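# Note: smol_lm_jd_process below builds the ChatML prompt by hand. A sketch of an
# alternative, assuming a transformers version recent enough that the tokenizer ships
# a chat template:
#   messages = [{"role": "system", "content": system_prompt},
#               {"role": "user", "content": job_description}]
#   prompt = tokenizer.apply_chat_template(messages, tokenize=False, add_generation_prompt=True)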
def smol_lm_jd_process(job_description, system_prompt, max_new_tokens=512):
    """Run SmolLM2 on the given text with the given system prompt and return the assistant reply."""
    prompt = f"""<|im_start|>system
{system_prompt}<|im_end|>
<|im_start|>user
{job_description}<|im_end|>
<|im_start|>assistant
"""
    inputs = tokenizer(prompt, return_tensors="pt").to(device)
    output = model.generate(
        **inputs,
        max_new_tokens=max_new_tokens,
        do_sample=True,
        temperature=0.6,
        top_k=40,
        top_p=0.9,
        repetition_penalty=1.1,
    )
    response = tokenizer.decode(output[0], skip_special_tokens=False)
    # Keep only the assistant turn between the ChatML markers.
    start_idx = response.find("<|im_start|>assistant")
    end_idx = response.find("<|im_end|>", start_idx)
    response = response[start_idx + len("<|im_start|>assistant\n"):end_idx].strip()
    return response

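# Illustrative usage (hypothetical inputs), e.g. for a quick local sanity check:
#   reply = smol_lm_jd_process("We are hiring a data analyst...", "Summarize this job description.", max_new_tokens=100)
#   print(reply)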
def process_job_description(company_name, company_url, job_description):
    # Step 1 is the user input collected by the Gradio form below.
    # Step 2: Extract key qualifications, skills, and requirements
    system_prompt_requirements = "Extract key qualifications, skills, and requirements from this job description. Output as bullet points. Remove benefits/salary, bragging about the company, and other fluff not relevant to the skills, qualifications, and job requirements. ONLY INCLUDE INFORMATION THAT TELLS THE USER WHAT SKILLS THE EMPLOYER SEEKS."
    role_requirements = smol_lm_jd_process(job_description, system_prompt_requirements)
    # Step 3: Create a concise summary of the job description
    system_prompt_summary = "Create a concise 150-200 word summary of this job description. Remove bragging about the company and other fluff not relevant to the position and what is desired from the candidate. FOCUS ON WHAT THE EMPLOYER WANTS FROM A CANDIDATE IN TERMS OF SKILLS, ACCOMPLISHMENTS, AND SO ON."
    clean_job_description = smol_lm_jd_process(job_description, system_prompt_summary)
    # Extract a short, standardized job title.
    system_prompt_get_job_title = "Extract only the job title from the following job description. Respond with nothing but the job title - no labels, no comments, no summaries, no locations, or extra text. If the title is unusually long or nonstandard, replace it with the most common, concise, and widely recognized job title for the role. Your answer must be 7 words or fewer, with no punctuation, newlines, or additional information. Acceptable examples may look like: 'Systems Analyst', 'marketing director', 'patient advocate III', ..."
    raw_title = smol_lm_jd_process(job_description, system_prompt_get_job_title, max_new_tokens=150)
    job_title = raw_title[:50].lower().replace("job", "").replace("title", "").replace("\n", "").replace(":", "").strip()
    role = Role(company_name, company_url, job_description, clean_job_description, job_title)
    # Step 4: Company research via web search
    searches = {
        "company_values": f"{role.company_name} company values",
        "corporate_culture": f"{role.company_name} corporate culture",
        "leadership_team": f"{role.company_name} leadership team members relevant to {role.job_title} role",
        "recent_news": f"{role.company_name} recent news relevant to {role.job_title} role",
        "competitive_advantages": f"{role.company_name} competitive advantages in {role.job_title} market",
    }
    search_client = DDGS()
    search_results = {}
    for key, query in searches.items():
        print(f"searching {query}")
        try:
            results = search_client.text(query, max_results=3)
            print(f"searching {query} successful")
            search_results[key] = results
        except Exception as exc:
            # Most likely a rate-limit error; wait briefly and retry once.
            print(f"Error ({exc}); will wait and retry searching {query}.")
            sleep(5)
            results = search_client.text(query, max_results=3)
            print(f"searching {query} successful")
            search_results[key] = results
        sleep(3)
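    # Note: duckduckgo_search's DDGS.text() typically returns a list of dicts with
    # "title", "href", and "body" keys; the summarization step below assumes each
    # result carries a "body" field.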
    # Summarize search results using SmolLM
    summaries = {}
    system_prompt_summary_search = "Summarize the following search results in 150 tokens or less."
    for key, results in search_results.items():
        search_result_text = "\n".join([result["body"] for result in results])
        summary = smol_lm_jd_process(search_result_text, system_prompt_summary_search, max_new_tokens=150)
        summaries[key] = summary
    return {
        "Company Name": company_name,
        "Company URL": company_url,
        "job_title": job_title,
        "Original Job Description": job_description,
        "Role Requirements": role_requirements,
        "Clean Job Description": clean_job_description,
        "Company Research": {
            "Company Values Search Results": search_results["company_values"],
            "Company Values Summary": summaries["company_values"],
            "Corporate Culture Search Results": search_results["corporate_culture"],
            "Corporate Culture Summary": summaries["corporate_culture"],
            "Leadership Team Search Results": search_results["leadership_team"],
            "Leadership Team Summary": summaries["leadership_team"],
            "Recent News Search Results": search_results["recent_news"],
            "Recent News Summary": summaries["recent_news"],
            "Competitive Advantages Search Results": search_results["competitive_advantages"],
            "Competitive Advantages Summary": summaries["competitive_advantages"],
        },
    }

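# Illustrative programmatic call (hypothetical inputs), useful for testing outside the UI:
#   report = process_job_description("Acme Corp", "https://acme.example", "We are hiring a ...")
#   print(report["job_title"], report["Role Requirements"])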
# Create the Gradio app
demo = gr.Blocks()
with demo:
    gr.Markdown("# Job Description Input")
    company_name = gr.Textbox(label="Company Name")
    company_url = gr.Textbox(label="Company URL")
    job_description = gr.TextArea(label="Paste Job Description")
    gr.Button("Submit").click(
        process_job_description,
        inputs=[company_name, company_url, job_description],
        outputs=gr.JSON(label="Output"),
    )
if __name__ == "__main__":
    demo.launch()
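# Note: when running locally, demo.launch(share=True) would additionally create a
# temporary public link; on Hugging Face Spaces the default launch() is sufficient.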