Spaces: Running on Zero
AbstractPhil committed · Commit 6e6f48b · 1 Parent(s): c852c8b
yes
app.py CHANGED
@@ -4,6 +4,7 @@ from typing import Optional, Dict, Any, List
 import gradio as gr
 import spaces
 from transformers import AutoTokenizer, AutoModelForCausalLM
+from peft import PeftModel
 
 MODEL_ID = "openai/gpt-oss-20b"
 ADAPTER_ID = "AbstractPhil/mirel-gpt-oss-20b"
@@ -37,6 +38,13 @@ def gpu_generate(prompt_str: str, max_new_tokens: int = 512) -> str:
         token=HF_TOKEN,
     )
     model = AutoModelForCausalLM.from_pretrained(MODEL_ID, **model_kwargs)
+    # Apply Rose LoRA (minimal)
+    if ADAPTER_ID:
+        peft_kwargs: Dict[str, Any] = {"is_trainable": False, "token": HF_TOKEN}
+        if ADAPTER_SUBFOLDER:
+            peft_kwargs["subfolder"] = ADAPTER_SUBFOLDER
+        model = PeftModel.from_pretrained(model, ADAPTER_ID, **peft_kwargs)
+        model = model.merge_and_unload()
     model.eval()
     model.config.pad_token_id = tokenizer.pad_token_id
 
@@ -81,7 +89,7 @@ def ui_generate(message, history):
     return gpu_generate(prompt)
 
 with gr.Blocks(theme=gr.themes.Soft()) as demo:
-    gr.Markdown("""# Mirel –
+    gr.Markdown("""# Mirel – GPT‑OSS‑20B + Rose LoRA (ZeroGPU, Minimal)""")
     gr.ChatInterface(fn=ui_generate, type="messages", title="Mirel", cache_examples=False)
 
 if __name__ == "__main__":
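
For context, a minimal standalone sketch of the adapter-merge pattern this commit adds: load the gpt-oss-20b base, attach the Rose LoRA adapter with PEFT, fold it into the base weights with merge_and_unload(), then run a short generation. Only the adapter block mirrors the diff; the dtype, device placement, environment-variable handling for HF_TOKEN and ADAPTER_SUBFOLDER, and the generation settings are assumptions rather than the Space's exact code.

```python
# Minimal sketch (not the Space's exact app.py): base model + Rose LoRA, merged.
import os
from typing import Any, Dict

import torch
from peft import PeftModel
from transformers import AutoModelForCausalLM, AutoTokenizer

MODEL_ID = "openai/gpt-oss-20b"
ADAPTER_ID = "AbstractPhil/mirel-gpt-oss-20b"
ADAPTER_SUBFOLDER = os.getenv("ADAPTER_SUBFOLDER", "")  # assumed to come from the environment
HF_TOKEN = os.getenv("HF_TOKEN")                        # assumed to come from the environment

tokenizer = AutoTokenizer.from_pretrained(MODEL_ID, token=HF_TOKEN)
model = AutoModelForCausalLM.from_pretrained(
    MODEL_ID,
    torch_dtype=torch.bfloat16,  # assumed dtype
    device_map="auto",           # assumed placement
    token=HF_TOKEN,
)

# Attach the LoRA adapter and bake its deltas into the base weights,
# mirroring the block added in this commit.
if ADAPTER_ID:
    peft_kwargs: Dict[str, Any] = {"is_trainable": False, "token": HF_TOKEN}
    if ADAPTER_SUBFOLDER:
        peft_kwargs["subfolder"] = ADAPTER_SUBFOLDER
    model = PeftModel.from_pretrained(model, ADAPTER_ID, **peft_kwargs)
    model = model.merge_and_unload()

model.eval()
model.config.pad_token_id = tokenizer.pad_token_id

# Quick smoke test: tokenize a prompt and decode only the newly generated tokens.
prompt = "Hello, Mirel."
inputs = tokenizer(prompt, return_tensors="pt").to(model.device)
with torch.no_grad():
    out = model.generate(**inputs, max_new_tokens=64)
print(tokenizer.decode(out[0][inputs["input_ids"].shape[-1]:], skip_special_tokens=True))
```

Merging up front means inference runs against a plain transformers model, so the ZeroGPU-decorated generation path carries no PEFT wrapper overhead.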
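On the UI side, the diff wires ui_generate into gr.ChatInterface inside a gr.Blocks context. A sketch of how that wiring plausibly fits together is below; the @spaces.GPU decorator placement, the prompt construction inside ui_generate, and the stubbed gpu_generate body are assumptions, since the diff shows only fragments of those functions.

```python
# Sketch of the Gradio + ZeroGPU wiring around the diff's UI hunk.
# Only the Blocks/Markdown/ChatInterface lines mirror the diff; the rest is assumed.
import gradio as gr
import spaces


@spaces.GPU  # ZeroGPU attaches a GPU only while this function runs
def gpu_generate(prompt_str: str, max_new_tokens: int = 512) -> str:
    # Placeholder: the real Space loads gpt-oss-20b with the merged Rose LoRA
    # here and returns the decoded completion (see the previous sketch).
    return f"[echo] {prompt_str[-200:]}"


def ui_generate(message, history):
    # With type="messages", history arrives as a list of {"role", "content"} dicts,
    # so the turns can be flattened into a single prompt string (assumed format).
    turns = history + [{"role": "user", "content": message}]
    prompt = "\n".join(f"{t['role']}: {t['content']}" for t in turns)
    return gpu_generate(prompt)


with gr.Blocks(theme=gr.themes.Soft()) as demo:
    gr.Markdown("""# Mirel – GPT‑OSS‑20B + Rose LoRA (ZeroGPU, Minimal)""")
    gr.ChatInterface(fn=ui_generate, type="messages", title="Mirel", cache_examples=False)

if __name__ == "__main__":
    demo.launch()
```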