sainathBelagavi committed on
Commit
1d4d8a6
·
verified ·
1 Parent(s): 3010e69

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +17 -11
app.py CHANGED
@@ -111,14 +111,20 @@ if prompt := st.chat_input(f"Hi I'm {selected_model}, How can I help you today?"
111
  with st.chat_message("assistant"):
112
  client = InferenceClient(
113
  model=model_links[selected_model], )
114
- output = client.text_generation(
115
- formated_text,
116
- temperature=temp_values, # 0.5
117
- max_new_tokens=3000,
118
- stream=True
119
- )
120
- response = st.write_stream(output)
121
- st.session_state.messages.append({"role": "assistant", "content": response})
122
-
123
- # Save the updated conversation history to the file
124
- save_conversation_history(st.session_state.messages)
 
 
 
 
 
 
 
111
  with st.chat_message("assistant"):
112
  client = InferenceClient(
113
  model=model_links[selected_model], )
114
+ max_new_tokens = 2048 # Adjust this value as needed
115
+ try:
116
+ output = client.text_generation(
117
+ formated_text,
118
+ temperature=temp_values,
119
+ max_new_tokens=max_new_tokens,
120
+ stream=True
121
+ )
122
+ response = st.write_stream(output)
123
+ except ValueError as e:
124
+ if "Input validation error" in str(e):
125
+ st.error("Error: The input prompt is too long. Please try a shorter prompt.")
126
+ else:
127
+ st.error(f"An error occurred: {e}")
128
+ else:
129
+ st.session_state.messages.append({"role": "assistant", "content": response})
130
+ save_conversation_history(st.session_state.messages)