neovalle committed on
Commit 1f00830 · verified · 1 Parent(s): c0722e5

Update app.py

Files changed (1)
  1. app.py +7 -21
app.py CHANGED
@@ -2,14 +2,12 @@ import os, json
 import gradio as gr
 from huggingface_hub import InferenceClient
 
-# Load prompts
+# Load your prompts
 with open("system_prompts.json", encoding="utf-8") as f:
     SYSTEM_PROMPTS = json.load(f)
 
-# Initialize HF InferenceClient
 client = InferenceClient(model=os.getenv("HF_MODEL"), token=os.getenv("HF_TOKEN"))
 
-# Common runner
 def run_model(prompt: str, text: str) -> str:
     resp = client.chat.completions.create(
         messages=[{"role":"system","content":prompt},{"role":"user","content":text}],
@@ -17,27 +15,15 @@ def run_model(prompt: str, text: str) -> str:
     )
     return resp.choices[0].message.content
 
-# Dynamically create tool functions
-def make_tool_fn(prompt):
-    return lambda text: run_model(prompt, text)
-
-tool_fns = {
-    name.lower().replace(" ", "_"): make_tool_fn(prompt)
-    for name, prompt in SYSTEM_PROMPTS.items()
-}
-
-# Build Gradio interface
 demo = gr.Blocks()
 with demo:
     gr.Markdown("# 🌱 Ecolinguistic MCP Server")
-    tabs = gr.Tabs()
-    for badge, fn in tool_fns.items():
-        with tabs:
-            with gr.Tab(badge.replace("_", " ").title()):
-                inp = gr.Textbox(lines=5, label="Text")
-                out = gr.Textbox(label="Response")
-                gr.Button("Run").click(fn, inp, out)
+    for name, prompt in SYSTEM_PROMPTS.items():
+        func_name = name.lower().replace(" ", "_")
+        textbox = gr.Textbox(lines=5, label="Text")
+        output = gr.Textbox(label="Response")
+        gr.Button("Run", api_name=func_name)\
+            .click(lambda txt, p=prompt: run_model(p, txt), inputs=textbox, outputs=output)
 
-# Expose as MCP server
 if __name__ == "__main__":
     demo.launch(mcp_server=True, share=True)
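
The new block exposes one endpoint per entry in system_prompts.json by binding each prompt into its own click handler through a lambda default argument. Below is a minimal self-contained sketch of that pattern; the example prompt names and texts, the stubbed-out run_model, and attaching api_name to the click event (rather than to gr.Button) are assumptions made so the snippet runs on its own, not code taken from the commit.

import gradio as gr

# Hypothetical stand-ins for the prompts loaded from system_prompts.json
SYSTEM_PROMPTS = {
    "Erasure Check": "Analyse the text for erasure of the more-than-human world.",
    "Salience Check": "Analyse the text for ecological salience.",
}

def run_model(prompt: str, text: str) -> str:
    # Stub in place of the InferenceClient chat-completion call in app.py
    return f"({prompt}) -> {text}"

with gr.Blocks() as demo:
    gr.Markdown("# 🌱 Ecolinguistic MCP Server")
    for name, prompt in SYSTEM_PROMPTS.items():
        func_name = name.lower().replace(" ", "_")
        textbox = gr.Textbox(lines=5, label=f"{name} input")
        output = gr.Textbox(label="Response")
        # p=prompt freezes the current prompt in each closure; without the
        # default argument every button would end up calling the last prompt
        # produced by the loop.
        gr.Button(f"Run {name}").click(
            lambda txt, p=prompt: run_model(p, txt),
            inputs=textbox,
            outputs=output,
            api_name=func_name,  # name under which this handler is exposed
        )

if __name__ == "__main__":
    demo.launch(mcp_server=True)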