Update src/app_job_copy_1.py
src/app_job_copy_1.py  (+1 -1)  CHANGED
@@ -215,7 +215,7 @@ def call_llm(candidate_data, job_data, llm_chain):
     response = llm_chain.invoke(payload)
     # print(candidate_data.get("Experience", "")) # Kept for your debugging if needed

-    response_str = f"candidate_name: {response.candidate_name}
+    response_str = f"candidate_name: {response.candidate_name} URL:{response.candidate_url} summ:{response.candidate_summary} loc: {response.candidate_location} just {response.justification} fit_score: {float(f'{response.fit_score:.3f}')}." # Truncated
     output_tokens = calculate_tokens(response_str, st.session_state.model_name)

     if 'total_input_tokens' not in st.session_state: st.session_state.total_input_tokens = 0
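For context, the new line builds response_str as a single-line f-string over the response fields and formats fit_score to three decimals with float(f'{response.fit_score:.3f}') before the result is passed to calculate_tokens. Below is a minimal standalone sketch of that idiom, not the app's actual code: the response field names are taken from the diff, while LLMResponse, the sample values, and the stubbed calculate_tokens (which in the app takes st.session_state.model_name) are assumptions for illustration only.

# Sketch only: assumed response shape from the diff; calculate_tokens is a stub.
from dataclasses import dataclass

@dataclass
class LLMResponse:
    candidate_name: str
    candidate_url: str
    candidate_summary: str
    candidate_location: str
    justification: str
    fit_score: float

def calculate_tokens(text: str, model_name: str) -> int:
    # Stand-in for the app's token counter; rough whitespace split for illustration.
    return len(text.split())

response = LLMResponse("Jane Doe", "https://example.com/jane", "Backend engineer",
                       "Berlin", "Strong Python background", 0.876543)

# float(f'{x:.3f}') rounds the score to three decimals, as in the patched line.
response_str = (
    f"candidate_name: {response.candidate_name} URL:{response.candidate_url} "
    f"summ:{response.candidate_summary} loc: {response.candidate_location} "
    f"just {response.justification} fit_score: {float(f'{response.fit_score:.3f}')}."
)
output_tokens = calculate_tokens(response_str, "placeholder-model-name")
print(response_str)    # ... fit_score: 0.877.
print(output_tokens)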