import os
import json
import requests
import gradio as gr
# Load tool prompts
with open("system_prompts.json", "r", encoding="utf-8") as f:
SYSTEM_PROMPTS = json.load(f)
TOOLS = list(SYSTEM_PROMPTS.keys())
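# Illustrative shape of system_prompts.json (the tool names below are
# hypothetical examples, not taken from the repo); each key maps to the
# system prompt sent for that tool:
#
# {
#   "Frame Analysis": "You are an ecolinguist. Identify the frames ...",
#   "Erasure Detection": "You are an ecolinguist. Note what the text erases ..."
# }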
# These are your HF Inference Endpoint settings. The endpoint URL falls back
# to the serverless route for Mistral-7B-Instruct-v0.2; the access token must
# be supplied via the environment.
HF_API_URL = os.environ.get(
    "HF_API_URL",
    "https://api-inference.huggingface.co/models/mistralai/Mistral-7B-Instruct-v0.2",
)
HF_API_TOKEN = os.environ["HF_API_TOKEN"]
def run_mistral(prompt: str, text: str) -> str:
    """
    Calls the Hugging Face Inference Endpoint for Mistral via REST.
    Sends 'prompt' as the system instruction and 'text' as the user message,
    folded into Mistral's [INST] chat template, since text-generation
    endpoints expect 'inputs' to be a single string.
    """
    headers = {
        "Authorization": f"Bearer {HF_API_TOKEN}",
        "Content-Type": "application/json",
    }
    payload = {
        "inputs": f"<s>[INST] {prompt}\n\n{text} [/INST]",
        "parameters": {
            "max_new_tokens": 512,
            "temperature": 0.3,
            "return_full_text": False,
        },
    }
    resp = requests.post(HF_API_URL, headers=headers, json=payload, timeout=60)
    resp.raise_for_status()
    out = resp.json()
    # Text-generation endpoints return a list of generations
    if isinstance(out, list) and out and "generated_text" in out[0]:
        return out[0]["generated_text"]
    # otherwise a dict (e.g. an error or a raw generation)
    return out.get("generated_text", json.dumps(out))
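# Quick smoke test (a sketch, not part of the app; assumes HF_API_TOKEN is
# exported and the hypothetical prompt below stands in for a real tool prompt):
#
#   >>> run_mistral("You are an ecolinguist.", "Analyse: 'Fish stocks collapsed.'")
#   '...model output...'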
def ecoling_tool(tool: str, text: str) -> str:
    # Look up the tool's system prompt and call the endpoint
    return run_mistral(SYSTEM_PROMPTS[tool], text)
demo = gr.Interface(
    fn=ecoling_tool,
    inputs=[gr.Dropdown(TOOLS, label="Tool"), gr.Textbox(lines=8, label="Input Text")],
    outputs=[gr.Textbox(label="Response")],
    title="🌱 Ecolinguistic MCP Server",
    description="Accessible via UI and standard MCP; powered by an HF Mistral endpoint.",
)
if __name__ == "__main__":
    demo.launch(mcp_server=True)
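# With mcp_server=True, Gradio exposes this Interface as an MCP server in
# addition to the web UI; at the time of writing the MCP endpoint is served
# under /gradio_api/mcp/ (e.g. <space-url>/gradio_api/mcp/sse for SSE clients).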