CCockrum committed on
Commit
08f9aa9
·
verified ·
1 Parent(s): 9bc6a92

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +37 -29
app.py CHANGED
@@ -82,36 +82,31 @@ def respond(message, history, level, max_tokens, temperature, top_p):
82
  return
83
 
84
  system_message = level_to_prompt(level)
85
- messages = [{"role": "system", "content": system_message}]
86
-
87
- # Handle history based on its format
88
- if history and isinstance(history[0], dict):
89
- # New format (messages with role/content)
90
- messages.extend(history)
91
- else:
92
- # Old format (tuples)
93
- for user, bot in history:
94
- if user:
95
- messages.append({"role": "user", "content": user})
96
- if bot:
97
- messages.append({"role": "assistant", "content": bot})
98
-
99
- # Add current message
100
- messages.append({"role": "user", "content": message})
101
 
102
  # Generate response
103
  response = ""
104
  try:
105
- # Create a proper prompt format
106
- prompt = ""
107
- for msg in messages:
108
- if msg["role"] == "system":
109
- prompt += f"System: {msg['content']}\n"
110
- elif msg["role"] == "user":
111
- prompt += f"User: {msg['content']}\n"
112
- elif msg["role"] == "assistant":
113
- prompt += f"Assistant: {msg['content']}\n"
114
- prompt += "Assistant: "
 
 
 
 
 
 
 
 
 
 
 
115
 
116
  # Generate response with streaming
117
  for token in client.text_generation(
@@ -121,11 +116,15 @@ def respond(message, history, level, max_tokens, temperature, top_p):
121
  temperature=temperature,
122
  top_p=top_p,
123
  do_sample=True,
124
- return_full_text=False
 
125
  ):
126
  if token: # Handle None tokens
127
- response += token
128
- yield response
 
 
 
129
 
130
  except Exception as e:
131
  error_msg = str(e)
@@ -151,6 +150,15 @@ with gr.Blocks(css=css, title="French Tutor") as demo:
151
  gr.Markdown("✅ **Status**: Connected to AI service")
152
 
153
  with gr.Column(elem_id="chat-panel"):
 
 
 
 
 
 
 
 
 
154
  with gr.Accordion("⚙️ Advanced Settings", open=False):
155
  level = gr.Dropdown(
156
  choices=["A1", "A2", "B1", "B2", "C1", "C2"],
 
82
  return
83
 
84
  system_message = level_to_prompt(level)
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
85
 
86
  # Generate response
87
  response = ""
88
  try:
89
+ # Create a proper prompt format for instruction-following models
90
+ prompt = f"<|system|>\n{system_message}\n\n"
91
+
92
+ # Add conversation history
93
+ if history:
94
+ for turn in history:
95
+ if isinstance(turn, dict):
96
+ if turn.get("role") == "user":
97
+ prompt += f"<|user|>\n{turn['content']}\n\n"
98
+ elif turn.get("role") == "assistant":
99
+ prompt += f"<|assistant|>\n{turn['content']}\n\n"
100
+ else:
101
+ # Handle tuple format (user, assistant)
102
+ user_msg, bot_msg = turn
103
+ if user_msg:
104
+ prompt += f"<|user|>\n{user_msg}\n\n"
105
+ if bot_msg:
106
+ prompt += f"<|assistant|>\n{bot_msg}\n\n"
107
+
108
+ # Add current user message
109
+ prompt += f"<|user|>\n{message}\n\n<|assistant|>\n"
110
 
111
  # Generate response with streaming
112
  for token in client.text_generation(
 
116
  temperature=temperature,
117
  top_p=top_p,
118
  do_sample=True,
119
+ return_full_text=False,
120
+ stop_sequences=["<|user|>", "<|system|>"] # Stop if model tries to continue conversation
121
  ):
122
  if token: # Handle None tokens
123
+ # Clean up any unwanted tokens
124
+ token = token.replace("<|user|>", "").replace("<|system|>", "").replace("<|assistant|>", "")
125
+ if token.strip(): # Only add non-empty tokens
126
+ response += token
127
+ yield response
128
 
129
  except Exception as e:
130
  error_msg = str(e)
 
150
  gr.Markdown("✅ **Status**: Connected to AI service")
151
 
152
  with gr.Column(elem_id="chat-panel"):
153
+ gr.Markdown("""
154
+ ### 💬 **Try these example conversations:**
155
+ - "Bonjour! Comment allez-vous?" (Hello! How are you?)
156
+ - "Can you help me conjugate the verb 'être'?"
157
+ - "What's the difference between 'tu' and 'vous'?"
158
+ - "Tell me about French culture"
159
+ - "How do I order food in a French restaurant?"
160
+ """)
161
+
162
  with gr.Accordion("⚙️ Advanced Settings", open=False):
163
  level = gr.Dropdown(
164
  choices=["A1", "A2", "B1", "B2", "C1", "C2"],