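# Grok 4 chat app: a Gradio UI that streams responses from xAI's Grok 4
# through the OpenRouter API. A minimal usage sketch (the filename app.py
# is an assumption; any filename works):
#
#   pip install gradio openai
#   export OPENROUTER_API_KEY=<your OpenRouter key>
#   python app.py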
import gradio as gr
import openai
from openai import OpenAI
import os
import time


def setup_client():
    """Return an OpenAI client configured for the OpenRouter API."""
    api_key = os.environ.get('OPENROUTER_API_KEY')
    if not api_key:
        raise ValueError("OPENROUTER_API_KEY environment variable not set")
    return OpenAI(
        base_url="https://openrouter.ai/api/v1",
        api_key=api_key,
    )
def chat_with_grok_streaming(message, history):
    """Main chat function with streaming"""
    if not message:
        # This is a generator, so yield (rather than return) the unchanged state
        yield history, ""
        return

    # Add logging
    print(f"User message: {message}")

    # Add user message immediately
    history.append((message, ""))
    try:
        client = setup_client()

        # Create the streaming completion
        stream = client.chat.completions.create(
            model="x-ai/grok-4",
            messages=[
                {
                    "role": "user",
                    "content": message
                }
            ],
            max_tokens=1000,
            temperature=0.7,
            stream=True  # Enable streaming
        )

        # Stream the response
        response = ""
        for chunk in stream:
            if chunk.choices[0].delta.content is not None:
                response += chunk.choices[0].delta.content
                # Update the last message in history with the streaming response
                history[-1] = (message, response)
                yield history, ""
                time.sleep(0.05)  # Small delay to make streaming visible

        # Final update and logging
        history[-1] = (message, response)
        print(f"AI response: {response}")
        yield history, ""

    except Exception as e:
        error_msg = f"Error: {str(e)}"
        print(f"Error occurred: {str(e)}")
        history[-1] = (message, error_msg)
        yield history, ""
def clear_chat():
    """Clear the chat history"""
    return [], ""
# Create the Gradio interface
with gr.Blocks(title="Grok 4 Chat Interface by Xhaheen", theme=gr.themes.Soft()) as demo:
    gr.HTML("""
        <div style="text-align: center; padding: 20px;">
            <h1>🤖 Grok 4 Chat Interface</h1>
            <p>Chat with xAI's Grok 4 model via OpenRouter (Streaming)</p>
        </div>
    """)
    with gr.Row():
        with gr.Column(scale=1):
            clear_btn = gr.Button("🗑️ Clear Chat", variant="secondary", size="large")
        with gr.Column(scale=3):
            chatbot = gr.Chatbot(
                label="Chat with Grok 4",
                height=500,
                show_copy_button=True
            )
            msg_input = gr.Textbox(
                label="Message",
                placeholder="Type your message here...",
                lines=2,
                max_lines=5
            )
            send_btn = gr.Button("Send 🚀", variant="primary", size="large")
    # Event handlers
    def submit_message(message, history):
        if message:
            # Use yield from for streaming
            yield from chat_with_grok_streaming(message, history)
        else:
            yield history, message
    # Send message on button click
    send_btn.click(
        submit_message,
        inputs=[msg_input, chatbot],
        outputs=[chatbot, msg_input]
    )

    # Send message on Enter key
    msg_input.submit(
        submit_message,
        inputs=[msg_input, chatbot],
        outputs=[chatbot, msg_input]
    )

    # Clear chat
    clear_btn.click(
        clear_chat,
        outputs=[chatbot, msg_input]
    )
# Launch the interface (share=True is not needed when running on Hugging Face Spaces)
if __name__ == "__main__":
    demo.queue()  # enable queuing so generator-based streaming works (default in Gradio 4+, required in 3.x)
    demo.launch()