Dhahlan2000 committed on
Commit
b524841
·
1 Parent(s): 9a4471a

Enhance app.py to improve error handling during email generation. Added try-except blocks in both conversation_predict and update_ui functions to catch exceptions and display error messages in the Streamlit interface. This change ensures a more robust user experience by providing feedback in case of issues during response generation.

Browse files
Files changed (1) hide show
  1. app.py +27 -22
app.py CHANGED
@@ -113,18 +113,20 @@ def conversation_predict(input_text: str, cv_sections: Dict[str, str]):
113
  prompt = create_email_prompt(input_text, cv_sections)
114
 
115
  # Use the streaming API
116
- for response in client.text_generation(
117
- model="google/gemma-2b-it",
118
- prompt=prompt,
119
- max_new_tokens=512,
120
- temperature=0.7,
121
- top_p=0.95,
122
- stream=True
123
- ):
124
- if hasattr(response, 'token'): # Handle different response formats
125
- yield response.token.text
126
- else:
127
- yield response.generated_text
 
 
128
 
129
  def respond(
130
  message: str,
@@ -182,16 +184,19 @@ def update_ui(message, cv_file, cv_sections):
182
  if message and cv_file and isinstance(cv_sections, dict):
183
  email_text = ""
184
  # Stream the response
185
- for chunk in conversation_predict(message, cv_sections):
186
- if chunk:
187
- email_text += chunk
188
- # Update the text area with each chunk
189
- email_placeholder.text_area(
190
- "Generated Email",
191
- value=email_text,
192
- height=400,
193
- key="email_output"
194
- )
 
 
 
195
  else:
196
  st.warning("Please upload a CV and enter a job description.")
197
 
 
113
  prompt = create_email_prompt(input_text, cv_sections)
114
 
115
  # Use the streaming API
116
+ try:
117
+ for response in client.text_generation(
118
+ model="google/gemma-2b-it",
119
+ prompt=prompt,
120
+ max_new_tokens=512,
121
+ temperature=0.7,
122
+ top_p=0.95,
123
+ stream=True
124
+ ):
125
+ # The streaming response returns text directly
126
+ yield response
127
+ except Exception as e:
128
+ st.error(f"Error generating response: {str(e)}")
129
+ yield ""
130
 
131
  def respond(
132
  message: str,
 
184
  if message and cv_file and isinstance(cv_sections, dict):
185
  email_text = ""
186
  # Stream the response
187
+ try:
188
+ for chunk in conversation_predict(message, cv_sections):
189
+ if chunk:
190
+ email_text += chunk
191
+ # Update the text area with each chunk
192
+ email_placeholder.text_area(
193
+ "Generated Email",
194
+ value=email_text,
195
+ height=400,
196
+ key="email_output"
197
+ )
198
+ except Exception as e:
199
+ st.error(f"Error during email generation: {str(e)}")
200
  else:
201
  st.warning("Please upload a CV and enter a job description.")
202