import gradio as gr
import requests
import json
import os
import base64
import mimetypes
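
# Note: the Space is assumed to list `gradio` and `requests` in its requirements.txt.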

# Set OpenRouter API key in the Space's secrets as "OPENROUTER_API_KEY"
OPENROUTER_API_KEY = os.getenv("OPENROUTER_API_KEY")
HEADERS = {
    "Authorization": f"Bearer {OPENROUTER_API_KEY}",
    "HTTP-Referer": "https://huggingface.co/spaces/YOUR_SPACE",  # Optional
    "X-Title": "CrispChat"  # Optional
}

# Free OpenRouter models: display label -> (model id, context window in tokens)
FREE_MODELS = {
    "Google: Gemini Pro 2.5 Experimental (free)": ("google/gemini-2.5-pro-exp-03-25:free", 1000000),
    "DeepSeek: DeepSeek V3 (free)": ("deepseek/deepseek-chat:free", 131072),
    "Meta: Llama 3.2 11B Vision Instruct (free)": ("meta-llama/llama-3.2-11b-vision-instruct:free", 131072),
    "Qwen: Qwen2.5 VL 72B Instruct (free)": ("qwen/qwen2.5-vl-72b-instruct:free", 131072),
}


def query_openrouter_model(model_id, prompt, image=None):
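    """Send a chat completion request to OpenRouter and return the reply text."""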
    messages = [{"role": "user", "content": prompt}]

    # If an image is attached, send a multimodal message: the text prompt plus the
    # image as a base64-encoded data URL (MIME type guessed from the file name).
    if image is not None:
        with open(image, "rb") as f:
            base64_image = base64.b64encode(f.read()).decode("utf-8")
        mime_type = mimetypes.guess_type(image)[0] or "image/png"
        messages[0]["content"] = [
            {"type": "text", "text": prompt},
            {"type": "image_url", "image_url": {"url": f"data:{mime_type};base64,{base64_image}"}}
        ]

    payload = {
        "model": model_id,
        "messages": messages
    }

    response = requests.post(
        url="https://openrouter.ai/api/v1/chat/completions",
        headers=HEADERS,
        data=json.dumps(payload)
    )

    try:
        response.raise_for_status()
        data = response.json()
        return data["choices"][0]["message"]["content"]
    except Exception as e:
        return f"Error: {str(e)}\n{response.text}"


def chat_interface(prompt, image, model_label):
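    """Gradio callback: resolve the dropdown label to a model id and query it."""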
    model_id, _ = FREE_MODELS[model_label]
    return query_openrouter_model(model_id, prompt, image)


with gr.Blocks(title="CrispChat") as demo:
    gr.Markdown("""
    # 🌟 CrispChat 
    Multi-modal chat with free OpenRouter models
    """)

    with gr.Row():
        prompt = gr.Textbox(label="Enter your message", lines=4, placeholder="Ask me anything...")
        image = gr.Image(type="filepath", label="Optional image input")

    model_choice = gr.Dropdown(
        choices=list(FREE_MODELS.keys()),
        value="Google: Gemini Pro 2.5 Experimental (free)",
        label="Select model"
    )

    output = gr.Textbox(label="Response", lines=6)

    submit = gr.Button("Submit")
    submit.click(fn=chat_interface, inputs=[prompt, image, model_choice], outputs=output)

if __name__ == "__main__":
    demo.launch()