Enhance app.py by importing the 'os' module for improved environment variable handling. This change supports better configuration management in the application.
557b7cf
import streamlit as st
from langchain.memory import ConversationBufferMemory
from transformers import AutoTokenizer, AutoModelForCausalLM, pipeline
import PyPDF2
from dotenv import load_dotenv
import os

# Load environment variables from .env
load_dotenv()

# Hugging Face access token, optionally supplied via the API_KEY environment variable
access_token = os.getenv("API_KEY")
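# A minimal .env next to app.py only needs the token line that os.getenv("API_KEY")
# reads above; for example (placeholder value, not a real token):
#   API_KEY=hf_xxxxxxxxxxxxxxxxxxxx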
# Streamlit App Title
st.title("Job Description and CV-Based Email Generator")
st.write("""
This app uses Hugging Face's Gemma model to generate a professional email based on a pre-parsed CV and a job description.
Upload your CV once in the sidebar, and the system will reuse the parsed details for generating emails.
""")

# Sidebar for Settings and CV Upload
st.sidebar.title("Settings and CV Upload")
# A token entered in the sidebar overrides the API_KEY value loaded from .env
access_token = st.sidebar.text_input("Enter your Hugging Face Access Token", type="password") or access_token

# File Upload for CV in Sidebar
uploaded_file = st.sidebar.file_uploader("Upload your CV (PDF format):", type=["pdf"])
if "parsed_cv" not in st.session_state:
    st.session_state.parsed_cv = None

if uploaded_file is not None:
    try:
        # Extract text from PDF (extract_text() can return None for image-only pages)
        pdf_reader = PyPDF2.PdfReader(uploaded_file)
        cv_text = "".join([(page.extract_text() or "") for page in pdf_reader.pages])
        st.sidebar.success("CV uploaded and text extracted successfully!")

        # Parse CV details and save to session state
        def parse_cv(cv_text):
            # Basic parsing logic with placeholder fields (can be extended for
            # specific details; an illustrative sketch follows this try/except block)
            return f"""
Name: [Extracted Name]
Contact Information: [Extracted Contact Info]
Skills: [Extracted Skills]
Experience: [Extracted Experience]
Education: [Extracted Education]
Summary: {cv_text[:500]}... (truncated summary of the CV)
"""

        st.session_state.parsed_cv = parse_cv(cv_text)
        st.sidebar.success("CV parsed successfully!")
    except Exception as e:
        st.sidebar.error(f"Failed to extract text from CV: {e}")
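# Illustrative sketch only: parse_cv above returns placeholder fields, and its comment
# notes it can be extended. One hedged way to pull a few concrete details out of the
# extracted text is with simple regular expressions; the helper below is an example of
# that idea (the field names and patterns are assumptions) and is not called by the app.
def parse_cv_with_regex(cv_text):
    import re
    # First e-mail address and first phone-like number found in the extracted text
    email_match = re.search(r"[\w.+-]+@[\w-]+\.[\w.]+", cv_text)
    phone_match = re.search(r"\+?\d[\d\s()-]{7,}\d", cv_text)
    return f"""
Contact e-mail: {email_match.group(0) if email_match else '[not found]'}
Phone: {phone_match.group(0) if phone_match else '[not found]'}
Summary: {cv_text[:500]}...
"""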
if st.session_state.parsed_cv:
    st.sidebar.write("### Parsed CV Details:")
    st.sidebar.text(st.session_state.parsed_cv)
# Ensure Access Token is Provided
if access_token:
    def initialize_pipeline(access_token):
        tokenizer = AutoTokenizer.from_pretrained("google/gemma-2b-it", token=access_token)
        model = AutoModelForCausalLM.from_pretrained(
            "google/gemma-2b-it",
            torch_dtype="bfloat16",
            token=access_token
        )
        return pipeline("text-generation", model=model, tokenizer=tokenizer, max_new_tokens=512)

    text_gen_pipeline = initialize_pipeline(access_token)
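    # Note: the Gemma weights are loaded on every Streamlit rerun here; wrapping
    # initialize_pipeline with st.cache_resource (a standard Streamlit pattern)
    # would keep the model in memory across reruns.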
    def initialize_memory():
        # Buffer memory that stores each job description / generated email pair;
        # generation itself is handled by the Hugging Face pipeline above
        return ConversationBufferMemory()

    memory = initialize_memory()
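    # After each generation, memory.buffer holds the running history as plain text,
    # e.g. "Human: <job description>\nAI: <generated email>" (the "Human"/"AI"
    # prefixes are LangChain's defaults for ConversationBufferMemory).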
    # Input job description
    job_description = st.text_area("Enter the job description:", "")

    # Display generated email
    if st.button("Generate Email"):
        if st.session_state.parsed_cv and job_description.strip():
            # Prompt for email generation
            prompt = (
                f"Based on the following CV details:\n\n{st.session_state.parsed_cv}\n\n"
                f"And the following job description:\n\n{job_description}\n\n"
                f"Write a professional email expressing interest in the job. "
                f"Make it concise, polite, and tailored to the job."
            )
            # Generate email using Hugging Face pipeline
            # (return_full_text=False so the prompt itself is not echoed back in the output)
            response = text_gen_pipeline(prompt, return_full_text=False)[0]['generated_text']

            # Update memory with job description and response
            memory.save_context({"job_description": job_description}, {"email": response})

            # Display response
            st.subheader("Generated Email:")
            st.write(response)

            # Display conversation history
            st.subheader("History:")
            st.write(memory.buffer)
        else:
            st.warning("Please upload your CV in the sidebar and enter a job description.")
else:
    st.warning("Please enter your Hugging Face access token in the sidebar to use the app.")
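# To try the app locally (assuming Streamlit and the imports above are installed):
#   streamlit run app.py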