AbstractPhil committed on
Commit
dd4aeba
·
1 Parent(s): b3580b2
Files changed (1) hide show
  1. app.py +2 -1
app.py CHANGED
@@ -138,7 +138,7 @@ def _load_model_on(device_map: Optional[str]) -> AutoModelForCausalLM:
138
  # peft_kwargs["subfolder"] = ADAPTER_SUBFOLDER
139
  # model = PeftModel.from_pretrained(model, ADAPTER_ID, is_trainable=False, **peft_kwargs)
140
 
141
-
142
  model.eval()
143
  # Ensure a valid pad_token_id is set; some OSS checkpoints reuse eos as pad
144
  if getattr(model.config, "pad_token_id", None) is None:
@@ -697,6 +697,7 @@ with gr.Blocks(theme=gr.themes.Soft()) as demo:
697
  do_sample, seed, rose_enable, rose_alpha, rose_score,
698
  rose_tokens, rose_json, show_thinking, reasoning_effort
699
  ],
 
700
  title="Chat with Mirel",
701
  description="A chain-of-thought model using Harmony format",
702
  examples=[
 
138
  # peft_kwargs["subfolder"] = ADAPTER_SUBFOLDER
139
  # model = PeftModel.from_pretrained(model, ADAPTER_ID, is_trainable=False, **peft_kwargs)
140
 
141
+
142
  model.eval()
143
  # Ensure a valid pad_token_id is set; some OSS checkpoints reuse eos as pad
144
  if getattr(model.config, "pad_token_id", None) is None:
 
697
  do_sample, seed, rose_enable, rose_alpha, rose_score,
698
  rose_tokens, rose_json, show_thinking, reasoning_effort
699
  ],
700
+
701
  title="Chat with Mirel",
702
  description="A chain-of-thought model using Harmony format",
703
  examples=[