Spaces:
Runtime error
Runtime error
AbstractPhil
committed on
Commit
·
7f8b6c0
1
Parent(s):
3e87a0a
;yes
Browse files
app.py
CHANGED
@@ -85,7 +85,7 @@ else:
|
|
85 |
harmony_encoding = None
|
86 |
|
87 |
# Stop tokens per Harmony spec: <|return|> (200002), <|call|> (200012)
|
88 |
-
HARMONY_STOP_IDS = [
|
89 |
|
90 |
# Tokenizer is lightweight; load once
|
91 |
try:
|
@@ -214,7 +214,7 @@ def parse_harmony_response(tokens: List[int]) -> Dict[str, str]:
|
|
214 |
channel = msg.channel if hasattr(msg, 'channel') else "final"
|
215 |
if channel not in channels:
|
216 |
channels[channel] = ""
|
217 |
-
channels[channel] += msg.content
|
218 |
|
219 |
# Ensure we have a final channel
|
220 |
if "final" not in channels:
|
@@ -309,17 +309,8 @@ def zerogpu_generate(full_prompt,
|
|
309 |
if "attention_mask" not in inputs:
|
310 |
inputs["attention_mask"] = torch.ones_like(inputs["input_ids"], dtype=torch.long, device=device)
|
311 |
# Generate
|
312 |
-
# Build EOS list: Harmony stops
|
313 |
-
eos_ids =
|
314 |
-
if HARMONY_AVAILABLE:
|
315 |
-
eos_ids.extend(HARMONY_STOP_IDS)
|
316 |
-
tok_eos = tokenizer.eos_token_id
|
317 |
-
if tok_eos is not None:
|
318 |
-
if isinstance(tok_eos, int):
|
319 |
-
eos_ids.append(tok_eos)
|
320 |
-
else:
|
321 |
-
eos_ids.extend(list(tok_eos))
|
322 |
-
eos_ids = list(dict.fromkeys(eos_ids))
|
323 |
|
324 |
out_ids = model.generate(
|
325 |
**inputs,
|
|
|
85 |
harmony_encoding = None
|
86 |
|
87 |
# Stop tokens per Harmony spec: <|return|> (200002), <|call|> (200012)
|
88 |
+
HARMONY_STOP_IDS = harmony_encoding.stop_tokens_for_assistant_actions() if HARMONY_AVAILABLE else []
|
89 |
|
90 |
# Tokenizer is lightweight; load once
|
91 |
try:
|
|
|
214 |
channel = msg.channel if hasattr(msg, 'channel') else "final"
|
215 |
if channel not in channels:
|
216 |
channels[channel] = ""
|
217 |
+
channels[channel] += "".join([getattr(part, "text", str(part)) for part in (msg.content if isinstance(msg.content, list) else [msg.content])])
|
218 |
|
219 |
# Ensure we have a final channel
|
220 |
if "final" not in channels:
|
|
|
309 |
if "attention_mask" not in inputs:
|
310 |
inputs["attention_mask"] = torch.ones_like(inputs["input_ids"], dtype=torch.long, device=device)
|
311 |
# Generate
|
312 |
+
# Build EOS list: use ONLY Harmony assistant-action stops (per OpenAI docs)
|
313 |
+
eos_ids = HARMONY_STOP_IDS if HARMONY_AVAILABLE else tokenizer.eos_token_id
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
314 |
|
315 |
out_ids = model.generate(
|
316 |
**inputs,
|