# AI Code Generator — Gradio chat app backed by OpenRouter.
import os
import gradio as gr
from openai import OpenAI
from dotenv import load_dotenv
import datetime
import tempfile
# Load environment variables from .env file (for local testing)
load_dotenv()
# Get API key from environment variables.
# NOTE(review): no validation here — if OPENROUTER_API_KEY is unset the client
# is constructed with api_key=None and every request fails at call time;
# consider failing fast with a clear message.
OPENROUTER_API_KEY = os.environ.get("OPENROUTER_API_KEY")
# Initialize OpenAI client with OpenRouter base URL (OpenRouter speaks the
# OpenAI chat-completions protocol, so the stock client works unchanged).
client = OpenAI(
base_url="https://openrouter.ai/api/v1",
api_key=OPENROUTER_API_KEY
)
# Permanent system prompt (locked; the UI never exposes or edits it).
SYSTEM_PROMPT = """You are an **AI Code Generator**. Your task is to **write complete, production-ready code** with no explanations unless explicitly asked. Follow these rules:
1. **Always prioritize code** over text. If the user asks for a solution, respond ONLY with:
- A full code block (with imports, error handling, and tests if applicable).
- Brief comments in the code (no paragraphs).
2. **Never provide summaries, plans, or step-by-step guides** unless the user requests them with:
- "Explain..." or "How does this work?"
3. **Assume technical proficiency**: Skip introductory notes (e.g., "Here's how to...")."""
# Models offered in the dropdown; each entry is an OpenRouter model slug.
AVAILABLE_MODELS = [
"deepseek/deepseek-chat-v3-0324:free"
]
def generate_response(message, model_name, temperature=0.7, history=None):
    """Generate a chat completion for *message* via OpenRouter.

    Args:
        message: The user's current prompt; a falsy value short-circuits.
        model_name: OpenRouter model slug to query.
        temperature: Sampling temperature forwarded to the API.
        history: Optional list of (user_msg, bot_msg) pairs from prior turns.

    Returns:
        The assistant's reply, or an "Error: ..." string on failure.
        Always a string — the original returned a tuple ("", []) for empty
        input, which leaked into the chat history as a bogus bot message.
    """
    if not message:
        return ""
    try:
        # The locked system prompt always leads the conversation.
        messages = [{"role": "system", "content": SYSTEM_PROMPT}]
        # Replay prior turns so the model has conversational context.
        if history:
            for user_msg, bot_msg in history:
                messages.append({"role": "user", "content": user_msg})
                messages.append({"role": "assistant", "content": bot_msg})
        # Add current message
        messages.append({"role": "user", "content": message})
        response = client.chat.completions.create(
            model=model_name,
            messages=messages,
            temperature=temperature,
        )
        return response.choices[0].message.content
    except Exception as e:
        # Surface API/network failures in the UI instead of crashing Gradio.
        return f"Error: {str(e)}"
def save_chat_history(history):
    """Write the system prompt and the chat transcript to a temp file.

    Args:
        history: List of (user_msg, bot_msg) tuples.

    Returns:
        Path to the written UTF-8 text file (handed to the DownloadButton).
    """
    parts = [f"System Prompt:\n{SYSTEM_PROMPT}\n\n", "Chat History:\n\n"]
    for user_msg, bot_msg in history:
        parts.append(f"User: {user_msg}\n")
        parts.append(f"Assistant: {bot_msg}\n\n")
    # Embed the timestamp in the temp-file name. (The original built a
    # `filename` variable from this timestamp but never used it.)
    timestamp = datetime.datetime.now().strftime("%Y%m%d_%H%M%S")
    # delete=False: the file must outlive this call so Gradio can serve it.
    # Explicit UTF-8 — the platform default encoding may not handle the
    # model's output on Windows.
    with tempfile.NamedTemporaryFile(
        mode="w",
        delete=False,
        prefix=f"code_generator_chat_{timestamp}_",
        suffix=".txt",
        encoding="utf-8",
    ) as temp_file:
        temp_file.write("".join(parts))
        return temp_file.name
# --- UI layout and event wiring -------------------------------------------
with gr.Blocks(title="AI Code Generator") as demo:
    gr.Markdown("# AI Code Generator (via OpenRouter)")
    gr.Markdown("### System prompt is locked to code-generation mode")

    # Server-side conversation state: list of (user_msg, bot_msg) tuples.
    # (The original also created an unused `download_file = gr.State(None)`;
    # removed — nothing ever read or wrote it.)
    chat_history = gr.State([])

    with gr.Row():
        with gr.Column(scale=3):
            model_dropdown = gr.Dropdown(
                choices=AVAILABLE_MODELS,
                value=AVAILABLE_MODELS[0],
                label="Select Model",
            )
            temperature_slider = gr.Slider(
                minimum=0.1,
                maximum=1.0,
                value=0.7,
                step=0.1,
                label="Temperature",
            )

    with gr.Row():
        with gr.Column(scale=4):
            chatbot = gr.Chatbot(height=400)
            user_input = gr.Textbox(
                label="Your Message",
                placeholder="Type your coding request here...",
                lines=3,
            )
            with gr.Row():
                submit_btn = gr.Button("Generate Code", variant="primary")
                download_btn = gr.DownloadButton("Download Chat", visible=False)
                clear_btn = gr.ClearButton([user_input, chatbot])

    def respond(message, history, model_name, temperature):
        """Run one chat turn and refresh the downloadable transcript.

        Returns (new_state, chatbot_value, download_button_update).
        """
        # Guard: an empty prompt would otherwise pollute the history with
        # a blank (or malformed) turn.
        if not message:
            return history, history, gr.DownloadButton(visible=False)
        response = generate_response(message, model_name, temperature, history)
        updated_history = history + [(message, response)]
        # Regenerate the transcript file so the download is always current.
        file_path = save_chat_history(updated_history)
        return updated_history, updated_history, gr.DownloadButton(value=file_path, visible=True)

    # The button click and the textbox's Enter key run the same handler,
    # then clear the input box — wire both identically in one loop.
    for trigger in (submit_btn.click, user_input.submit):
        trigger(
            respond,
            [user_input, chat_history, model_dropdown, temperature_slider],
            [chat_history, chatbot, download_btn],
        ).then(
            lambda: "", None, user_input
        )

if __name__ == "__main__":
    # Guarded so importing this module (e.g. in tests) doesn't start a server.
    demo.launch()