# AutoGrn / app.py
# Author: cynnix69 — "Update app.py" (commit 37273e3, verified)
import os
import gradio as gr
from openai import OpenAI
from dotenv import load_dotenv
import datetime
import tempfile
# Load environment variables from .env file (for local testing)
load_dotenv()
# Get API key from environment variables
# NOTE(review): if OPENROUTER_API_KEY is unset this is None; the client is
# still constructed and every request will fail with an auth error at runtime.
OPENROUTER_API_KEY = os.environ.get("OPENROUTER_API_KEY")
# Initialize OpenAI client with OpenRouter base URL
# (OpenRouter exposes an OpenAI-compatible API, so the stock SDK works).
client = OpenAI(
    base_url="https://openrouter.ai/api/v1",
    api_key=OPENROUTER_API_KEY
)
# Permanent system prompt (your specific instructions)
# Sent as the first "system" message on every request; the UI does not
# expose it for editing ("locked to code-generation mode").
SYSTEM_PROMPT = """You are an **AI Code Generator**. Your task is to **write complete, production-ready code** with no explanations unless explicitly asked. Follow these rules:
1. **Always prioritize code** over text. If the user asks for a solution, respond ONLY with:
- A full code block (with imports, error handling, and tests if applicable).
- Brief comments in the code (no paragraphs).
2. **Never provide summaries, plans, or step-by-step guides** unless the user requests them with:
- "Explain..." or "How does this work?"
3. **Assume technical proficiency**: Skip introductory notes (e.g., "Here's how to...")."""
# List of available models
# OpenRouter model identifiers shown in the dropdown; the first entry is
# the default selection.
AVAILABLE_MODELS = [
    "deepseek/deepseek-chat-v3-0324:free"
]
def generate_response(message, model_name, temperature=0.7, history=None):
    """Generate a chat completion for *message* via OpenRouter.

    Args:
        message: The user's current prompt; a falsy value short-circuits.
        model_name: OpenRouter model identifier to request.
        temperature: Sampling temperature forwarded to the API.
        history: Optional list of (user_msg, bot_msg) pairs replayed into
            the request so the model keeps conversational context.

    Returns:
        The assistant's reply text, or an "Error: ..." string on failure.
    """
    # Bug fix: the original returned the tuple ("", []) here while every
    # other path returns a plain string; the caller stores this value as
    # the bot message, so the tuple corrupted the chat history.
    if not message:
        return ""
    try:
        messages = [{"role": "system", "content": SYSTEM_PROMPT}]
        # Replay prior turns so the model sees the full conversation.
        if history:
            for user_msg, bot_msg in history:
                messages.append({"role": "user", "content": user_msg})
                messages.append({"role": "assistant", "content": bot_msg})
        # The current user message goes last.
        messages.append({"role": "user", "content": message})
        response = client.chat.completions.create(
            model=model_name,
            messages=messages,
            temperature=temperature
        )
        return response.choices[0].message.content
    except Exception as e:
        # Surface API/network failures to the UI instead of crashing.
        return f"Error: {str(e)}"
def save_chat_history(history):
    """Write the system prompt and full chat transcript to a temp .txt file.

    Args:
        history: List of (user_msg, bot_msg) pairs.

    Returns:
        Absolute path of the written file, suitable for a DownloadButton.
    """
    parts = [f"System Prompt:\n{SYSTEM_PROMPT}\n\n", "Chat History:\n\n"]
    for user_msg, bot_msg in history:
        parts.append(f"User: {user_msg}\n")
        parts.append(f"Assistant: {bot_msg}\n\n")
    # Bug fix: the original built a timestamped filename but never used it
    # (NamedTemporaryFile picks a random name); embed it via prefix= so the
    # downloaded file is recognizably named.
    timestamp = datetime.datetime.now().strftime("%Y%m%d_%H%M%S")
    temp_file = tempfile.NamedTemporaryFile(
        mode="w",
        delete=False,
        prefix=f"code_generator_chat_{timestamp}_",
        suffix=".txt",
        encoding="utf-8",  # explicit so non-ASCII code snippets survive
    )
    # Context manager guarantees the handle is closed even if write fails.
    with temp_file:
        temp_file.writelines(parts)
    return temp_file.name
# Build the Gradio UI: model controls on top, chat area below, and three
# action buttons; `respond` wires user input through generate_response.
with gr.Blocks(title="AI Code Generator") as demo:
    gr.Markdown("# AI Code Generator (via OpenRouter)")
    gr.Markdown("### System prompt is locked to code-generation mode")
    # Store chat history as a list of (user_msg, bot_msg) pairs.
    chat_history = gr.State([])
    # NOTE(review): unused — the download path goes through download_btn's
    # value directly, never through this State.
    download_file = gr.State(None)
    with gr.Row():
        with gr.Column(scale=3):
            # Model picker; defaults to the first entry in AVAILABLE_MODELS.
            model_dropdown = gr.Dropdown(
                choices=AVAILABLE_MODELS,
                value=AVAILABLE_MODELS[0],
                label="Select Model"
            )
            # Sampling temperature forwarded to the API (0.1–1.0).
            temperature_slider = gr.Slider(
                minimum=0.1,
                maximum=1.0,
                value=0.7,
                step=0.1,
                label="Temperature"
            )
    with gr.Row():
        with gr.Column(scale=4):
            chatbot = gr.Chatbot(height=400)
            user_input = gr.Textbox(
                label="Your Message",
                placeholder="Type your coding request here...",
                lines=3
            )
    with gr.Row():
        submit_btn = gr.Button("Generate Code", variant="primary")
        # Hidden until the first response; respond() reveals it with a
        # fresh transcript file each turn.
        download_btn = gr.DownloadButton("Download Chat", visible=False)
        clear_btn = gr.ClearButton([user_input, chatbot])

    def respond(message, history, model_name, temperature):
        """Handle one chat turn: query the model, extend history, and
        refresh the downloadable transcript.

        Returns (new state, chatbot display, updated download button);
        state and chatbot receive the same (user, bot) pair list.
        """
        response = generate_response(message, model_name, temperature, history)
        updated_history = history + [(message, response)]
        # Re-export the whole transcript so the download is always current.
        file_path = save_chat_history(updated_history)
        return updated_history, updated_history, gr.DownloadButton(value=file_path, visible=True)

    # Button click and textbox Enter share the same handler; each chains
    # a .then() that clears the input box after the response lands.
    submit_btn.click(
        respond,
        [user_input, chat_history, model_dropdown, temperature_slider],
        [chat_history, chatbot, download_btn]
    ).then(
        lambda: "", None, user_input
    )
    user_input.submit(
        respond,
        [user_input, chat_history, model_dropdown, temperature_slider],
        [chat_history, chatbot, download_btn]
    ).then(
        lambda: "", None, user_input
    )

demo.launch()